mirror of https://github.com/jumpserver/jumpserver
commit
8757cc97ed
@@ -21,6 +21,9 @@ jobs:
push: false
tags: jumpserver/core:test
file: Dockerfile
build-args: |
APT_MIRROR=http://deb.debian.org
PIP_MIRROR=https://pypi.org/simple
cache-from: type=gha
cache-to: type=gha,mode=max
@@ -1,3 +1,3 @@
from .account import *
from .backup import *
from .task import *
from .template import *
@@ -1,20 +1,20 @@
from django.shortcuts import get_object_or_404
from rest_framework.decorators import action
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.generics import ListAPIView
from rest_framework.response import Response

from accounts import serializers
from accounts.filters import AccountFilterSet
from accounts.models import Account
from accounts.tasks import verify_accounts_connectivity
from assets.models import Asset
from authentication.const import ConfirmType
from common.views.mixins import RecordViewLogMixin
from common.permissions import UserConfirmation
from common.views.mixins import RecordViewLogMixin
from orgs.mixins.api import OrgBulkModelViewSet

__all__ = [
'AccountViewSet', 'AccountSecretsViewSet', 'AccountTaskCreateAPI', 'AccountHistoriesSecretAPI'
'AccountViewSet', 'AccountSecretsViewSet',
'AccountHistoriesSecretAPI'
]

from rbac.permissions import RBACPermission
@@ -28,15 +28,15 @@ class AccountViewSet(OrgBulkModelViewSet):
'default': serializers.AccountSerializer,
}
rbac_perms = {
'verify_account': 'assets.test_account',
'partial_update': 'assets.change_accountsecret',
'su_from_accounts': 'assets.view_account',
'partial_update': ['accounts.change_account'],
'su_from_accounts': 'accounts.view_account',
}

@action(methods=['get'], detail=False, url_path='su-from-accounts')
def su_from_accounts(self, request, *args, **kwargs):
account_id = request.query_params.get('account')
asset_id = request.query_params.get('asset')

if account_id:
account = get_object_or_404(Account, pk=account_id)
accounts = account.get_su_from_accounts()
@@ -45,17 +45,10 @@ class AccountViewSet(OrgBulkModelViewSet):
accounts = asset.accounts.all()
else:
accounts = []
accounts = self.filter_queryset(accounts)
serializer = serializers.AccountSerializer(accounts, many=True)
return Response(data=serializer.data)

@action(methods=['post'], detail=True, url_path='verify')
def verify_account(self, request, *args, **kwargs):
account = super().get_object()
account_ids = [account.id]
asset_ids = [account.asset_id]
task = verify_accounts_connectivity.delay(account_ids, asset_ids)
return Response(data={'task': task.id})


class AccountSecretsViewSet(RecordViewLogMixin, AccountViewSet):
"""
@@ -67,8 +60,8 @@ class AccountSecretsViewSet(RecordViewLogMixin, AccountViewSet):
http_method_names = ['get', 'options']
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
rbac_perms = {
'list': 'assets.view_accountsecret',
'retrieve': 'assets.view_accountsecret',
'list': 'accounts.view_accountsecret',
'retrieve': 'accounts.view_accountsecret',
}


@@ -78,38 +71,20 @@ class AccountHistoriesSecretAPI(RecordViewLogMixin, ListAPIView):
http_method_names = ['get', 'options']
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
rbac_perms = {
'list': 'assets.view_accountsecret',
'list': 'accounts.view_accountsecret',
}

def get_object(self):
return get_object_or_404(Account, pk=self.kwargs.get('pk'))

def get_queryset(self):
return self.model.objects.filter(id=self.kwargs.get('pk'))
account = self.get_object()
histories = account.history.all()
last_history = account.history.first()
if not last_history:
return histories


class AccountTaskCreateAPI(CreateAPIView):
serializer_class = serializers.AccountTaskSerializer
search_fields = AccountViewSet.search_fields
filterset_class = AccountViewSet.filterset_class

def check_permissions(self, request):
return request.user.has_perm('assets.test_assetconnectivity')

def get_accounts(self):
queryset = Account.objects.all()
queryset = self.filter_queryset(queryset)
return queryset

def perform_create(self, serializer):
accounts = self.get_accounts()
account_ids = accounts.values_list('id', flat=True)
asset_ids = [account.asset_id for account in accounts]
task = verify_accounts_connectivity.delay(account_ids, asset_ids)
data = getattr(serializer, '_data', {})
data["task"] = task.id
setattr(serializer, '_data', data)
return task

def get_exception_handler(self):
def handler(e, context):
return Response({"error": str(e)}, status=400)

return handler
if account.secret == last_history.secret \
and account.secret_type == last_history.secret_type:
histories = histories.exclude(history_id=last_history.history_id)
return histories
@@ -0,0 +1,48 @@
from rest_framework.generics import CreateAPIView
from rest_framework.response import Response

from accounts import serializers
from accounts.tasks import verify_accounts_connectivity_task, push_accounts_to_assets_task
from assets.exceptions import NotSupportedTemporarilyError

__all__ = [
'AccountsTaskCreateAPI',
]


class AccountsTaskCreateAPI(CreateAPIView):
serializer_class = serializers.AccountTaskSerializer

def check_permissions(self, request):
act = request.data.get('action')
if act == 'push':
code = 'accounts.push_account'
else:
code = 'accounts.verify_account'
return request.user.has_perm(code)

def perform_create(self, serializer):
data = serializer.validated_data
accounts = data.get('accounts', [])
account_ids = [str(a.id) for a in accounts]

if data['action'] == 'push':
task = push_accounts_to_assets_task.delay(account_ids)
else:
account = accounts[0]
asset = account.asset
if not asset.auto_info['ansible_enabled'] or \
not asset.auto_info['ping_enabled']:
raise NotSupportedTemporarilyError()
task = verify_accounts_connectivity_task.delay(account_ids)

data = getattr(serializer, '_data', {})
data["task"] = task.id
setattr(serializer, '_data', data)
return task

def get_exception_handler(self):
def handler(e, context):
return Response({"error": str(e)}, status=400)

return handler
@@ -23,6 +23,6 @@ class AccountTemplateSecretsViewSet(RecordViewLogMixin, AccountTemplateViewSet):
http_method_names = ['get', 'options']
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
rbac_perms = {
'list': 'assets.view_accounttemplatesecret',
'retrieve': 'assets.view_accounttemplatesecret',
'list': 'accounts.view_accounttemplatesecret',
'retrieve': 'accounts.view_accounttemplatesecret',
}
@@ -1,3 +1,4 @@
from .backup import *
from .base import *
from .change_secret import *
from .gather_accounts import *
@@ -3,13 +3,13 @@
from rest_framework import status, viewsets
from rest_framework.response import Response

from orgs.mixins.api import OrgBulkModelViewSet
from common.const.choices import Trigger
from accounts import serializers
from accounts.tasks import execute_account_backup_plan
from accounts.models import (
AccountBackupAutomation, AccountBackupExecution
)
from accounts.tasks import execute_account_backup_task
from common.const.choices import Trigger
from orgs.mixins.api import OrgBulkModelViewSet

__all__ = [
'AccountBackupPlanViewSet', 'AccountBackupPlanExecutionViewSet'
@@ -20,7 +20,6 @@ class AccountBackupPlanViewSet(OrgBulkModelViewSet):
model = AccountBackupAutomation
filter_fields = ('name',)
search_fields = filter_fields
ordering_fields = ('name',)
ordering = ('name',)
serializer_class = serializers.AccountBackupSerializer

@@ -39,5 +38,5 @@ class AccountBackupPlanExecutionViewSet(viewsets.ModelViewSet):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
pid = serializer.data.get('plan')
task = execute_account_backup_plan.delay(pid=pid, trigger=Trigger.manual)
task = execute_account_backup_task.delay(pid=str(pid), trigger=Trigger.manual)
return Response({'task': task.id}, status=status.HTTP_201_CREATED)
@@ -3,10 +3,10 @@ from django.utils.translation import ugettext_lazy as _
from rest_framework import status, mixins, viewsets
from rest_framework.response import Response

from accounts.models import AutomationExecution
from accounts.tasks import execute_account_automation_task
from assets import serializers
from assets.models import BaseAutomation
from accounts.tasks import execute_automation
from accounts.models import AutomationExecution
from common.const.choices import Trigger
from orgs.mixins import generics

@@ -109,7 +109,7 @@ class AutomationExecutionViewSet(
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
automation = serializer.validated_data.get('automation')
task = execute_automation.delay(
pid=automation.pk, trigger=Trigger.manual, tp=self.tp
task = execute_account_automation_task.delay(
pid=str(automation.pk), trigger=Trigger.manual, tp=self.tp
)
return Response({'task': task.id}, status=status.HTTP_201_CREATED)
@@ -25,7 +25,6 @@ class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
model = ChangeSecretAutomation
filter_fields = ('name', 'secret_type', 'secret_strategy')
search_fields = filter_fields
ordering_fields = ('name',)
serializer_class = serializers.ChangeSecretAutomationSerializer

@@ -1,13 +1,22 @@
# -*- coding: utf-8 -*-
#
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response

from accounts import serializers
from accounts.const import AutomationTypes
from accounts.const import Source
from accounts.filters import GatheredAccountFilterSet
from accounts.models import GatherAccountsAutomation
from accounts.models import GatheredAccount
from orgs.mixins.api import OrgBulkModelViewSet
from .base import AutomationExecutionViewSet

__all__ = [
'GatherAccountsAutomationViewSet', 'GatherAccountsExecutionViewSet'
'GatherAccountsAutomationViewSet', 'GatherAccountsExecutionViewSet',
'GatheredAccountViewSet'
]


@@ -15,7 +24,6 @@ class GatherAccountsAutomationViewSet(OrgBulkModelViewSet):
model = GatherAccountsAutomation
filter_fields = ('name',)
search_fields = filter_fields
ordering_fields = ('name',)
serializer_class = serializers.GatherAccountAutomationSerializer


@@ -32,3 +40,32 @@ class GatherAccountsExecutionViewSet(AutomationExecutionViewSet):
queryset = super().get_queryset()
queryset = queryset.filter(automation__type=self.tp)
return queryset


class GatheredAccountViewSet(OrgBulkModelViewSet):
model = GatheredAccount
search_fields = ('username',)
filterset_class = GatheredAccountFilterSet
serializer_classes = {
'default': serializers.GatheredAccountSerializer,
}
rbac_perms = {
'sync_account': 'assets.add_gatheredaccount',
}

@action(methods=['post'], detail=True, url_path='sync')
def sync_account(self, request, *args, **kwargs):
gathered_account = super().get_object()
asset = gathered_account.asset
username = gathered_account.username
accounts = asset.accounts.filter(username=username)

if accounts.exists():
accounts.update(source=Source.COLLECTED)
else:
asset.accounts.model.objects.create(
asset=asset, username=username,
name=f'{username}-{_("Collected")}',
source=Source.COLLECTED
)
return Response(status=status.HTTP_201_CREATED)
@@ -22,7 +22,6 @@ class PushAccountAutomationViewSet(OrgBulkModelViewSet):
model = PushAccountAutomation
filter_fields = ('name', 'secret_type', 'secret_strategy')
search_fields = filter_fields
ordering_fields = ('name',)
serializer_class = serializers.PushAccountAutomationSerializer

@@ -7,4 +7,5 @@ class AccountsConfig(AppConfig):

def ready(self):
from . import signal_handlers
from . import tasks
__all__ = signal_handlers
@@ -77,15 +77,10 @@ class AssetAccountHandler(BaseAccountHandler):
return filename

@classmethod
def create_data_map(cls, types: list):
def create_data_map(cls, accounts):
data_map = defaultdict(list)

# TODO 可以优化一下查询 在账号上做 category 的缓存 避免数据量大时连表操作
qs = Account.objects.filter(
asset__platform__type__in=types
).annotate(type=F('asset__platform__type'))

if not qs.exists():
if not accounts.exists():
return data_map

type_dict = {}
@@ -93,18 +88,18 @@ class AssetAccountHandler(BaseAccountHandler):
for j in i['children']:
type_dict[j['value']] = j['display_name']

header_fields = cls.get_header_fields(AccountSecretSerializer(qs.first()))
header_fields = cls.get_header_fields(AccountSecretSerializer(accounts.first()))
account_type_map = defaultdict(list)
for account in qs:
for account in accounts:
account_type_map[account.type].append(account)

data_map = {}
for tp, accounts in account_type_map.items():
for tp, _accounts in account_type_map.items():
sheet_name = type_dict.get(tp, tp)
data = AccountSecretSerializer(accounts, many=True).data
data = AccountSecretSerializer(_accounts, many=True).data
data_map.update(cls.add_rows(data, header_fields, sheet_name))

logger.info('\n\033[33m- 共收集 {} 条账号\033[0m'.format(qs.count()))
logger.info('\n\033[33m- 共备份 {} 条账号\033[0m'.format(accounts.count()))
return data_map


@@ -123,9 +118,8 @@ class AccountBackupHandler:
# Print task start date
time_start = time.time()
files = []
types = self.execution.types

data_map = AssetAccountHandler.create_data_map(types)
accounts = self.execution.backup_accounts
data_map = AssetAccountHandler.create_data_map(accounts)
if not data_map:
return files

@@ -1,54 +1,11 @@
from copy import deepcopy

from common.utils import get_logger
from accounts.const import AutomationTypes, SecretType
from assets.automations.base.manager import BasePlaybookManager
from accounts.automations.methods import platform_automation_methods
from assets.automations.base.manager import BasePlaybookManager
from common.utils import get_logger

logger = get_logger(__name__)


class PushOrVerifyHostCallbackMixin:
execution: callable
get_accounts: callable
host_account_mapper: dict
generate_public_key: callable
generate_private_key_path: callable

def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
host = super().host_callback(host, asset=asset, account=account, automation=automation, **kwargs)
if host.get('error'):
return host

accounts = asset.accounts.all()
accounts = self.get_accounts(account, accounts)

inventory_hosts = []
for account in accounts:
h = deepcopy(host)
h['name'] += '_' + account.username
self.host_account_mapper[h['name']] = account
secret = account.secret

private_key_path = None
if account.secret_type == SecretType.SSH_KEY:
private_key_path = self.generate_private_key_path(secret, path_dir)
secret = self.generate_public_key(secret)

h['secret_type'] = account.secret_type
h['account'] = {
'name': account.name,
'username': account.username,
'secret_type': account.secret_type,
'secret': secret,
'private_key_path': private_key_path
}
inventory_hosts.append(h)
return inventory_hosts


class AccountBasePlaybookManager(BasePlaybookManager):
pass

@property
def platform_automation_methods(self):
@@ -10,12 +10,12 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
ssl: "{{ jms_asset.specific.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
ssl_certfile: "{{ jms_asset.specific.client_key }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
ssl: "{{ jms_asset.spec_info.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
register: db_info

- name: Display MongoDB version
@@ -29,13 +29,13 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
ssl: "{{ jms_asset.specific.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
ssl_certfile: "{{ jms_asset.specific.client_key }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
ssl: "{{ jms_asset.spec_info.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
db: "{{ jms_asset.specific.db_name }}"
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
db: "{{ jms_asset.spec_info.db_name }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
when: db_info is succeeded
@@ -47,12 +47,12 @@
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
ssl: "{{ jms_asset.specific.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
ssl_certfile: "{{ jms_asset.specific.client_key }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
ssl: "{{ jms_asset.spec_info.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when:
- db_info is succeeded
- change_info is succeeded
- db_info is succeeded
- change_info is succeeded
@@ -2,6 +2,7 @@
gather_facts: no
vars:
ansible_python_interpreter: /usr/local/bin/python
db_name: "{{ jms_asset.spec_info.db_name }}"

tasks:
- name: Test MySQL connection
@@ -26,6 +27,7 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
when: db_info is succeeded
register: change_info

@@ -37,5 +39,5 @@
login_port: "{{ jms_asset.port }}"
filter: version
when:
- db_info is succeeded
- change_info is succeeded
- db_info is succeeded
- change_info is succeeded
@@ -10,7 +10,7 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
mode: "{{ jms_account.mode }}"
register: db_info

@@ -25,7 +25,7 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
mode: "{{ jms_account.mode }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
@@ -38,8 +38,7 @@
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
mode: "{{ account.mode }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
when:
- db_info is succeeded
- change_info is succeeded
- db_info is succeeded
- change_info is succeeded
@@ -10,13 +10,14 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.specific.db_name }}"
register: db_info
login_db: "{{ jms_asset.spec_info.db_name }}"
register: result
failed_when: not result.is_available

- name: Display PostgreSQL version
debug:
var: db_info.server_version.full
when: db_info is succeeded
var: result.server_version.full
when: result is succeeded

- name: Change PostgreSQL password
community.postgresql.postgresql_user:
@@ -24,10 +25,11 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.specific.db_name }}"
db: "{{ jms_asset.spec_info.db_name }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
when: db_info is succeeded
role_attr_flags: LOGIN
when: result is succeeded
register: change_info

- name: Verify password
@@ -36,7 +38,9 @@
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.specific.db_name }}"
db: "{{ jms_asset.spec_info.db_name }}"
when:
- db_info is succeeded
- change_info is succeeded
- result is succeeded
- change_info is succeeded
register: result
failed_when: not result.is_available
@@ -10,17 +10,28 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.specific.db_name }}'
name: '{{ jms_asset.spec_info.db_name }}'
script: |
SELECT @@version
register: db_info

- name: SQLServer version
set_fact:
info:
version: "{{ db_info.query_results[0][0][0][0].splitlines()[0] }}"
- debug:
var: info
- name: SQLServer version
set_fact:
info:
version: "{{ db_info.query_results[0][0][0][0].splitlines()[0] }}"
- debug:
var: info

- name: Check whether SQLServer User exist
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
script: "SELECT 1 from sys.sql_logins WHERE name='{{ account.username }}';"
when: db_info is succeeded
register: user_exist

- name: Change SQLServer password
community.general.mssql_script:
@@ -28,9 +39,20 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.specific.db_name }}'
name: '{{ jms_asset.spec_info.db_name }}'
script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}'; select @@version"
when: db_info is succeeded
when: user_exist.query_results[0] | length != 0
register: change_info

- name: Add SQLServer user
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
script: "CREATE LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}'; select @@version"
when: user_exist.query_results[0] | length == 0
register: change_info

- name: Verify password
@@ -39,9 +61,9 @@
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.specific.db_name }}'
name: '{{ jms_asset.spec_info.db_name }}'
script: |
SELECT @@version
when:
- db_info is succeeded
- change_info is succeeded
- db_info is succeeded
- change_info is succeeded
@@ -0,0 +1,54 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.builtin.ping:

- name: Change password
ansible.builtin.user:
name: "{{ account.username }}"
password: "{{ account.secret | password_hash('des') }}"
update_password: always
when: secret_type == "password"

- name: create user If it already exists, no operation will be performed
ansible.builtin.user:
name: "{{ account.username }}"
when: secret_type == "ssh_key"

- name: remove jumpserver ssh key
ansible.builtin.lineinfile:
dest: "{{ kwargs.dest }}"
regexp: "{{ kwargs.regexp }}"
state: absent
when:
- secret_type == "ssh_key"
- kwargs.strategy == "set_jms"

- name: Change SSH key
ansible.builtin.authorized_key:
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ kwargs.exclusive }}"
when: secret_type == "ssh_key"

- name: Refresh connection
ansible.builtin.meta: reset_connection

- name: Verify password
ansible.builtin.ping:
become: no
vars:
ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}"
ansible_become: no
when: secret_type == "password"

- name: Verify SSH key
ansible.builtin.ping:
become: no
vars:
ansible_user: "{{ account.username }}"
ansible_ssh_private_key_file: "{{ account.private_key_path }}"
ansible_become: no
when: secret_type == "ssh_key"
@@ -0,0 +1,6 @@
id: change_secret_aix
name: Change secret for aix
category: host
type:
- AIX
method: change_secret
@@ -3,10 +3,6 @@
tasks:
- name: Test privileged account
ansible.builtin.ping:
#
# - name: print variables
# debug:
# msg: "Username: {{ account.username }}, Secret: {{ account.secret }}, Secret type: {{ secret_type }}"

- name: Change password
ansible.builtin.user:
@@ -26,8 +22,8 @@
regexp: "{{ kwargs.regexp }}"
state: absent
when:
- secret_type == "ssh_key"
- kwargs.strategy == "set_jms"
- secret_type == "ssh_key"
- kwargs.strategy == "set_jms"

- name: Change SSH key
ansible.builtin.authorized_key:
@@ -8,10 +8,18 @@
# debug:
# msg: "Username: {{ account.username }}, Password: {{ account.secret }}"


- name: Get groups of a Windows user
ansible.windows.win_user:
name: "{{ jms_account.username }}"
register: user_info

- name: Change password
ansible.windows.win_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
groups: "{{ user_info.groups[0].name }}"
groups_action: add
update_password: always
when: account.secret_type == "password"

@@ -1,27 +1,30 @@
import os
import time
from copy import deepcopy
from openpyxl import Workbook
from collections import defaultdict
from copy import deepcopy

from django.conf import settings
from django.utils import timezone
from openpyxl import Workbook

from users.models import User
from accounts.const import AutomationTypes, SecretType, SSHKeyStrategy, SecretStrategy
from accounts.models import ChangeSecretRecord
from accounts.notifications import ChangeSecretExecutionTaskMsg
from accounts.serializers import ChangeSecretRecordBackUpSerializer
from accounts.const import AutomationTypes, SecretType, SSHKeyStrategy, SecretStrategy
from assets.const import HostTypes
from common.utils import get_logger, lazyproperty
from common.utils.file import encrypt_and_compress_zip_file
from common.utils.timezone import local_now_display
from ...utils import SecretGenerator
from users.models import User
from ..base.manager import AccountBasePlaybookManager
from ...utils import SecretGenerator

logger = get_logger(__name__)


class ChangeSecretManager(AccountBasePlaybookManager):
ansible_account_prefer = ''

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.method_hosts_mapper = defaultdict(list)
@@ -33,18 +36,12 @@ class ChangeSecretManager(AccountBasePlaybookManager):
'ssh_key_change_strategy', SSHKeyStrategy.add
)
self.snapshot_account_usernames = self.execution.snapshot['accounts']
self._password_generated = None
self._ssh_key_generated = None
self.name_recorder_mapper = {}  # 做个映射,方便后面处理

@classmethod
def method_type(cls):
return AutomationTypes.change_secret

@lazyproperty
def related_accounts(self):
pass

def get_kwargs(self, account, secret):
kwargs = {}
if self.secret_type != SecretType.SSH_KEY:
@@ -89,15 +86,26 @@ class ChangeSecretManager(AccountBasePlaybookManager):
accounts = accounts.filter(username__in=self.snapshot_account_usernames)

accounts = accounts.filter(secret_type=self.secret_type)
if not accounts:
print('没有发现待改密账号: %s 用户名: %s 类型: %s' % (
asset.name, self.snapshot_account_usernames, self.secret_type
))
return []

method_attr = getattr(automation, self.method_type() + '_method')
method_hosts = self.method_hosts_mapper[method_attr]
method_hosts = [h for h in method_hosts if h != host['name']]
inventory_hosts = []
records = []
host['secret_type'] = self.secret_type

if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
print(f'Windows {asset} does not support ssh key push \n')
return inventory_hosts

for account in accounts:
h = deepcopy(host)
h['name'] += '_' + account.username
h['name'] += '(' + account.username + ')'
new_secret = self.get_secret()

recorder = ChangeSecretRecord(
@@ -135,8 +143,10 @@ class ChangeSecretManager(AccountBasePlaybookManager):
recorder.status = 'success'
recorder.date_finished = timezone.now()
recorder.save()

account = recorder.account
if not account:
print("Account not found, deleted ?")
return
account.secret = recorder.new_secret
account.save(update_fields=['secret'])

@@ -152,7 +162,16 @@ class ChangeSecretManager(AccountBasePlaybookManager):
def on_runner_failed(self, runner, e):
logger.error("Change secret error: ", e)

def check_secret(self):
if self.secret_strategy == SecretStrategy.custom \
and not self.execution.snapshot['secret']:
print('Custom secret is empty')
return False
return True

def run(self, *args, **kwargs):
if not self.check_secret():
return
super().run(*args, **kwargs)
recorders = self.name_recorder_mapper.values()
recorders = list(recorders)
@@ -10,12 +10,12 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
ssl: "{{ jms_asset.specific.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
ssl_certfile: "{{ jms_asset.specific.client_key }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
ssl: "{{ jms_asset.spec_info.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
filter: users
register: db_info

@@ -10,7 +10,7 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
mode: "{{ jms_account.mode }}"
filter: users
register: db_info
@@ -10,7 +10,7 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.specific.db_name }}"
login_db: "{{ jms_asset.spec_info.db_name }}"
filter: "roles"
register: db_info

@@ -30,6 +30,10 @@ class GatherAccountsFilter:
result = {}
for line in info:
data = line.split('@')
if len(data) == 1:
result[line] = {}
continue

if len(data) != 3:
continue
username, address, dt = data
@@ -4,8 +4,13 @@
- name: Gather posix account
ansible.builtin.shell:
cmd: >
users=$(getent passwd | grep -v nologin | grep -v shutdown | awk -F":" '{ print $1 }');for i in $users;
do last -w -F $i -1 | head -1 | grep -v ^$ | awk '{ print $1"@"$3"@"$5,$6,$7,$8 }';done
users=$(getent passwd | grep -v nologin | grep -v shutdown | awk -F":" '{ print $1 }');for i in $users;
do k=$(last -w -F $i -1 | head -1 | grep -v ^$ | awk '{ print $1"@"$3"@"$5,$6,$7,$8 }')
if [ -n "$k" ]; then
echo $k
else
echo $i
fi;done
register: result

- name: Define info by set_fact
@@ -2,8 +2,7 @@
gather_facts: no
tasks:
- name: Gather posix account
ansible.builtin.win_shell:
cmd: net user
ansible.builtin.win_shell: net user
register: result

- name: Define info by set_fact
@@ -1,7 +1,6 @@
from django.utils.translation import ugettext_lazy as _

from accounts.const import AutomationTypes
from accounts.models import GatheredAccount
from common.utils import get_logger
from accounts.const import AutomationTypes, Source
from orgs.utils import tmp_to_org
from .filter import GatherAccountsFilter
from ..base.manager import AccountBasePlaybookManager
@@ -28,32 +27,24 @@ class GatherAccountsManager(AccountBasePlaybookManager):
return result

@staticmethod
def bulk_create_accounts(asset, result):
account_objs = []
account_model = asset.accounts.model
account_usernames = set(asset.accounts.values_list('username', flat=True))
def update_or_create_gathered_accounts(asset, result):
with tmp_to_org(asset.org_id):
accounts_dict = {}
GatheredAccount.objects.filter(asset=asset, present=True).update(present=False)
for username, data in result.items():
comment = ''
d = {'asset': asset, 'username': username, 'name': username, 'source': Source.COLLECTED}
d = {'asset': asset, 'username': username, 'present': True}
if data.get('date'):
comment += f"{_('Date last login')}: {data['date']}\n "
d['date_last_login'] = data['date']
if data.get('address'):
comment += f"{_('IP last login')}: {data['address'][:32]}"
d['comment'] = comment
accounts_dict[username] = d
for username, data in accounts_dict.items():
if username in account_usernames:
continue
account_objs.append(account_model(**data))
account_model.objects.bulk_create(account_objs)
d['address_last_login'] = data['address'][:32]
GatheredAccount.objects.update_or_create(
defaults=d, asset=asset, username=username,
)

def on_host_success(self, host, result):
info = result.get('debug', {}).get('res', {}).get('info', {})
asset = self.host_asset_mapper.get(host)
if asset and info:
result = self.filter_success_result(host, info)
self.bulk_create_accounts(asset, result)
result = self.filter_success_result(asset.type, info)
self.update_or_create_gathered_accounts(asset, result)
else:
logger.error("Not found info".format(host))
@@ -1,26 +1,25 @@
from copy import deepcopy

from django.db.models import QuerySet

from common.utils import get_logger
from accounts.const import AutomationTypes
from accounts.const import AutomationTypes, SecretType
from accounts.models import Account
from ..base.manager import PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager
from assets.const import HostTypes
from common.utils import get_logger
from ..base.manager import AccountBasePlaybookManager
from ..change_secret.manager import ChangeSecretManager

logger = get_logger(__name__)


class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.secret_type = self.execution.snapshot['secret_type']
self.host_account_mapper = {}
class PushAccountManager(ChangeSecretManager, AccountBasePlaybookManager):
ansible_account_prefer = ''

@classmethod
def method_type(cls):
return AutomationTypes.push_account

def create_nonlocal_accounts(self, accounts, snapshot_account_usernames, asset):
secret = self.execution.snapshot['secret']
secret_type = self.secret_type
usernames = accounts.filter(secret_type=secret_type).values_list(
'username', flat=True
@@ -29,7 +28,7 @@ class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManag
create_account_objs = [
Account(
name=f'{username}-{secret_type}', username=username,
secret=secret, secret_type=secret_type, asset=asset,
secret_type=secret_type, asset=asset,
)
for username in create_usernames
]
@@ -37,7 +36,7 @@ class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManag

def get_accounts(self, privilege_account, accounts: QuerySet):
if not privilege_account:
logger.debug(f'not privilege account')
print(f'not privilege account')
return []
snapshot_account_usernames = self.execution.snapshot['accounts']
if '*' in snapshot_account_usernames:
@@ -50,6 +49,73 @@ class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManag
)
return accounts

def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
host = super(ChangeSecretManager, self).host_callback(
host, asset=asset, account=account, automation=automation,
path_dir=path_dir, **kwargs
)
if host.get('error'):
return host

accounts = asset.accounts.all()
accounts = self.get_accounts(account, accounts)
inventory_hosts = []
host['secret_type'] = self.secret_type
if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
msg = f'Windows {asset} does not support ssh key push \n'
print(msg)
return inventory_hosts

for account in accounts:
h = deepcopy(host)
h['name'] += '(' + account.username + ')'
new_secret = self.get_secret()

self.name_recorder_mapper[h['name']] = {
'account': account, 'new_secret': new_secret,
}

private_key_path = None
if self.secret_type == SecretType.SSH_KEY:
private_key_path = self.generate_private_key_path(new_secret, path_dir)
new_secret = self.generate_public_key(new_secret)

h['kwargs'] = self.get_kwargs(account, new_secret)
h['account'] = {
'name': account.name,
'username': account.username,
'secret_type': account.secret_type,
'secret': new_secret,
'private_key_path': private_key_path
}
if asset.platform.type == 'oracle':
h['account']['mode'] = 'sysdba' if account.privileged else None
inventory_hosts.append(h)
return inventory_hosts

def on_host_success(self, host, result):
account_info = self.name_recorder_mapper.get(host)
if not account_info:
return

account = account_info['account']
new_secret = account_info['new_secret']
if not account:
return
account.secret = new_secret
account.save(update_fields=['secret'])

def on_host_error(self, host, error, result):
pass

def on_runner_failed(self, runner, e):
logger.error("Pust account error: ", e)

def run(self, *args, **kwargs):
if not self.check_secret():
return
super().run(*args, **kwargs)

# @classmethod
# def trigger_by_asset_create(cls, asset):
# automations = PushAccountAutomation.objects.filter(
@@ -6,13 +6,13 @@
tasks:
- name: Verify account
mongodb_ping:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
ssl: "{{ jms_asset.specific.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
ssl_certfile: "{{ jms_asset.specific.client_key }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
ssl: "{{ jms_asset.spec_info.use_ssl }}"
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
@@ -6,9 +6,9 @@
tasks:
- name: Verify account
oracle_ping:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.specific.db_name }}"
mode: "{{ jms_account.mode }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
mode: "{{ account.mode }}"
@@ -3,6 +3,7 @@
vars:
ansible_python_interpreter: /usr/local/bin/python


tasks:
- name: Verify account
community.postgresql.postgresql_ping:
@@ -10,4 +11,6 @@
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.specific.db_name }}"
db: "{{ jms_asset.spec_info.db_name }}"
register: result
failed_when: not result.is_available
@@ -6,10 +6,10 @@
tasks:
- name: Verify account
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.specific.db_name }}'
name: '{{ jms_asset.spec_info.db_name }}'
script: |
SELECT @@version
@@ -1,11 +1,11 @@
- hosts: demo
gather_facts: no
tasks:
- name: Verify account
ansible.builtin.ping:
- name: Verify account connectivity
become: no
ansible.builtin.ping:
vars:
ansible_become: no
ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}"
ansible_ssh_private_key_file: "{{ account.private_key_path }}"
ansible_become: no
@@ -1,5 +1,5 @@
- hosts: windows
gather_facts: yes
gather_facts: no
tasks:
- name: Verify account
ansible.windows.win_ping:
@@ -1,18 +1,67 @@
import os
from copy import deepcopy

from django.db.models import QuerySet

from accounts.const import AutomationTypes, Connectivity, SecretType
from common.utils import get_logger
from accounts.const import AutomationTypes, Connectivity
from ..base.manager import PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager
from ..base.manager import AccountBasePlaybookManager

logger = get_logger(__name__)


class VerifyAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):
class VerifyAccountManager(AccountBasePlaybookManager):

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.host_account_mapper = {}

def prepare_runtime_dir(self):
path = super().prepare_runtime_dir()
ansible_config_path = os.path.join(path, 'ansible.cfg')

with open(ansible_config_path, 'w') as f:
f.write('[ssh_connection]\n')
f.write('ssh_args = -o ControlMaster=no -o ControlPersist=no\n')
return path

def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
host = super().host_callback(
host, asset=asset, account=account,
automation=automation, path_dir=path_dir, **kwargs
)
if host.get('error'):
return host

# host['ssh_args'] = '-o ControlMaster=no -o ControlPersist=no'
accounts = asset.accounts.all()
accounts = self.get_accounts(account, accounts)
inventory_hosts = []

for account in accounts:
h = deepcopy(host)
h['name'] += '(' + account.username + ')'
self.host_account_mapper[h['name']] = account
secret = account.secret

private_key_path = None
if account.secret_type == SecretType.SSH_KEY:
private_key_path = self.generate_private_key_path(secret, path_dir)
secret = self.generate_public_key(secret)

h['secret_type'] = account.secret_type
h['account'] = {
'name': account.name,
'username': account.username,
'secret_type': account.secret_type,
'secret': secret,
'private_key_path': private_key_path
}
if account.platform.type == 'oracle':
h['account']['mode'] = 'sysdba' if account.privileged else None
inventory_hosts.append(h)
return inventory_hosts

@classmethod
def method_type(cls):
return AutomationTypes.verify_account
@@ -29,4 +78,4 @@ class VerifyAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookMan

def on_host_error(self, host, error, result):
account = self.host_account_mapper.get(host)
account.set_connectivity(Connectivity.FAILED)
account.set_connectivity(Connectivity.ERR)
@@ -6,7 +6,7 @@ from django_filters import rest_framework as drf_filters
from assets.models import Node
from common.drf.filters import BaseFilterSet

from .models import Account
from .models import Account, GatheredAccount


class AccountFilterSet(BaseFilterSet):
@@ -47,3 +47,15 @@ class AccountFilterSet(BaseFilterSet):
class Meta:
model = Account
fields = ['id', 'asset_id']


class GatheredAccountFilterSet(BaseFilterSet):
node_id = drf_filters.CharFilter(method='filter_nodes')

@staticmethod
def filter_nodes(queryset, name, value):
return AccountFilterSet.filter_nodes(queryset, name, value)

class Meta:
model = GatheredAccount
fields = ['id', 'asset_id', 'username']
@@ -29,8 +29,7 @@ class Migration(migrations.Migration):
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
('org_id',
models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
('connectivity', models.CharField(choices=[('unknown', 'Unknown'), ('ok', 'Ok'), ('failed', 'Failed')],
default='unknown', max_length=16, verbose_name='Connectivity')),
('connectivity', models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity')),
('date_verified', models.DateTimeField(null=True, verbose_name='Date verified')),
('name', models.CharField(max_length=128, verbose_name='Name')),
('username', models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Username')),
@@ -51,7 +50,6 @@ class Migration(migrations.Migration):
options={
'verbose_name': 'Account',
'permissions': [('view_accountsecret', 'Can view asset account secret'),
('change_accountsecret', 'Can change asset account secret'),
('view_historyaccount', 'Can view asset history account'),
('view_historyaccountsecret', 'Can view asset history account secret')],
'unique_together': {('username', 'asset', 'secret_type'), ('name', 'asset')},
@@ -0,0 +1,38 @@
# Generated by Django 3.2.16 on 2023-02-07 04:41

from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

dependencies = [
('assets', '0108_alter_platform_charset'),
('accounts', '0005_alter_changesecretrecord_options'),
]

operations = [
migrations.CreateModel(
name='GatheredAccount',
fields=[
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
('present', models.BooleanField(default=True, verbose_name='Present')),
('date_last_login', models.DateTimeField(null=True, verbose_name='Date last login')),
('username', models.CharField(blank=True, db_index=True, max_length=32, verbose_name='Username')),
('address_last_login', models.CharField(default='', max_length=39, verbose_name='Address last login')),
('asset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='assets.asset', verbose_name='Asset')),
],
options={
'verbose_name': 'Gather account',
'ordering': ['asset'],
'unique_together': {('username', 'asset')},
},
),
]
@@ -0,0 +1,17 @@
# Generated by Django 3.2.16 on 2023-02-16 11:07

from django.db import migrations


class Migration(migrations.Migration):

dependencies = [
('accounts', '0006_gatheredaccount'),
]

operations = [
migrations.AlterModelOptions(
name='account',
options={'permissions': [('view_accountsecret', 'Can view asset account secret'), ('view_historyaccount', 'Can view asset history account'), ('view_historyaccountsecret', 'Can view asset history account secret'), ('verify_account', 'Can verify account'), ('push_account', 'Can push account')], 'verbose_name': 'Account'},
),
]
@ -2,10 +2,10 @@ from django.db import models
|
|||
from django.utils.translation import gettext_lazy as _
|
||||
from simple_history.models import HistoricalRecords
|
||||
|
||||
from common.utils import lazyproperty
|
||||
from ..const import AliasAccount, Source
|
||||
from assets.models.base import AbsConnectivity
|
||||
from common.utils import lazyproperty
|
||||
from .base import BaseAccount
|
||||
from ..const import AliasAccount, Source
|
||||
|
||||
__all__ = ['Account', 'AccountTemplate']
|
||||
|
||||
|
@ -62,11 +62,15 @@ class Account(AbsConnectivity, BaseAccount):
|
|||
]
|
||||
permissions = [
|
||||
('view_accountsecret', _('Can view asset account secret')),
|
||||
('change_accountsecret', _('Can change asset account secret')),
|
||||
('view_historyaccount', _('Can view asset history account')),
|
||||
('view_historyaccountsecret', _('Can view asset history account secret')),
|
||||
('verify_account', _('Can verify account')),
|
||||
('push_account', _('Can push account')),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '{}'.format(self.username)
|
||||
|
||||
@lazyproperty
|
||||
def platform(self):
|
||||
return self.asset.platform
|
||||
|
@ -77,9 +81,6 @@ class Account(AbsConnectivity, BaseAccount):
return self.username
return self.name

def __str__(self):
return '{}'.format(self.username)

@lazyproperty
def has_secret(self):
return bool(self.secret)

@ -89,10 +90,14 @@ class Account(AbsConnectivity, BaseAccount):
""" @INPUT: the account entered manually at login (any) """
return cls(name=AliasAccount.INPUT.label, username=AliasAccount.INPUT.value, secret=None)

@lazyproperty
def versions(self):
return self.history.count()

@classmethod
def get_user_account(cls, username):
def get_user_account(cls):
""" @USER: the dynamic user's own account (self) """
return cls(name=AliasAccount.USER.label, username=AliasAccount.USER.value)
return cls(name=AliasAccount.USER.label, username=AliasAccount.USER.value, secret=None)

def get_su_from_accounts(self):
""" Exclude this account and any accounts that use it as su-from """

@ -5,11 +5,13 @@ import uuid

from celery import current_task
from django.db import models
from django.db.models import F
from django.utils.translation import ugettext_lazy as _

from common.const.choices import Trigger
from common.db.encoder import ModelJSONFieldEncoder
from common.utils import get_logger
from common.utils import lazyproperty
from ops.mixin import PeriodTaskModelMixin
from orgs.mixins.models import OrgModelMixin, JMSOrgBaseModel

@ -34,9 +36,9 @@ class AccountBackupAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
verbose_name = _('Account backup plan')

def get_register_task(self):
from ...tasks import execute_account_backup_plan
from ...tasks import execute_account_backup_task
name = "account_backup_plan_period_{}".format(str(self.id)[:8])
task = execute_account_backup_plan.name
task = execute_account_backup_task.name
args = (str(self.id), Trigger.timing)
kwargs = {}
return name, task, args, kwargs

@ -70,6 +72,10 @@ class AccountBackupAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
)
return execution.start()

@lazyproperty
def latest_execution(self):
return self.execution.first()


class AccountBackupExecution(OrgModelMixin):
id = models.UUIDField(default=uuid.uuid4, primary_key=True)

@ -112,6 +118,15 @@ class AccountBackupExecution(OrgModelMixin):
return []
return recipients.values()

@lazyproperty
def backup_accounts(self):
from accounts.models import Account
# TODO: optimize this query by caching the category on the account to avoid table joins on large datasets
qs = Account.objects.filter(
asset__platform__type__in=self.types
).annotate(type=F('asset__platform__type'))
return qs

@property
def manager_type(self):
return 'backup_account'

@ -1,6 +1,6 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from accounts.tasks import execute_account_automation_task
from assets.models.automations import (
BaseAutomation as AssetBaseAutomation,
AutomationExecution as AssetAutomationExecution

@ -14,6 +14,10 @@ class AccountBaseAutomation(AssetBaseAutomation):
proxy = True
verbose_name = _("Account automation task")

@property
def execute_task(self):
return execute_account_automation_task

@property
def execution_model(self):
return AutomationExecution

@ -16,11 +16,11 @@ class ChangeSecretMixin(models.Model):
choices=SecretType.choices, max_length=16,
default=SecretType.PASSWORD, verbose_name=_('Secret type')
)
secret = fields.EncryptTextField(blank=True, null=True, verbose_name=_('Secret'))
secret_strategy = models.CharField(
choices=SecretStrategy.choices, max_length=16,
default=SecretStrategy.custom, verbose_name=_('Secret strategy')
)
secret = fields.EncryptTextField(blank=True, null=True, verbose_name=_('Secret'))
password_rules = models.JSONField(default=dict, verbose_name=_('Password rules'))
ssh_key_change_strategy = models.CharField(
choices=SSHKeyStrategy.choices, max_length=16,

@ -1,9 +1,35 @@
from django.utils.translation import ugettext_lazy as _

from django.db import models
from django.utils.translation import ugettext_lazy as _

from orgs.mixins.models import JMSOrgBaseModel
from accounts.const import AutomationTypes
from .base import AccountBaseAutomation

__all__ = ['GatherAccountsAutomation']
__all__ = ['GatherAccountsAutomation', 'GatheredAccount']


class GatheredAccount(JMSOrgBaseModel):
present = models.BooleanField(default=True, verbose_name=_("Present"))
date_last_login = models.DateTimeField(null=True, verbose_name=_("Date last login"))
asset = models.ForeignKey('assets.Asset', on_delete=models.CASCADE, verbose_name=_("Asset"))
username = models.CharField(max_length=32, blank=True, db_index=True, verbose_name=_('Username'))
address_last_login = models.CharField(max_length=39, default='', verbose_name=_("Address last login"))

@property
def address(self):
return self.asset.address

class Meta:
verbose_name = _('Gather account')
unique_together = [
('username', 'asset'),
]
ordering = ['asset']

def __str__(self):
return '{}: {}'.format(self.asset, self.username)


class GatherAccountsAutomation(AccountBaseAutomation):

@ -9,7 +9,6 @@ __all__ = ['PushAccountAutomation']

class PushAccountAutomation(ChangeSecretMixin, AccountBaseAutomation):
accounts = None
triggers = models.JSONField(max_length=16, default=list, verbose_name=_('Triggers'))
username = models.CharField(max_length=128, verbose_name=_('Username'))
action = models.CharField(max_length=16, verbose_name=_('Action'))

@ -50,7 +50,7 @@ class BaseAccount(JMSOrgBaseModel):
return bool(self.username)

@property
def specific(self):
def spec_info(self):
data = {}
if self.secret_type != SecretType.SSH_KEY:
return data

@ -92,6 +92,9 @@ class BaseAccount(JMSOrgBaseModel):
else:
return ''

if not public_key:
return ''

public_key_obj = sshpubkeys.SSHKey(public_key)
fingerprint = public_key_obj.hash_md5()
return fingerprint

@ -106,7 +109,7 @@ class BaseAccount(JMSOrgBaseModel):

@property
def private_key_path(self):
if not self.secret_type != SecretType.SSH_KEY \
if self.secret_type != SecretType.SSH_KEY \
or not self.secret \
or not self.private_key:
return None

@ -2,3 +2,4 @@ from .account import *
from .backup import *
from .base import *
from .template import *
from .gathered_account import *

@ -1,76 +1,83 @@
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers

from assets.models import Asset
from accounts.const import SecretType, Source
from accounts.models import Account, AccountTemplate
from accounts.tasks import push_accounts_to_assets
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from accounts.tasks import push_accounts_to_assets_task
from assets.const import Category, AllTypes
from assets.models import Asset
from common.serializers import SecretReadableMixin, BulkModelSerializer
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from .base import BaseAccountSerializer


class AccountSerializerCreateValidateMixin:
replace_attrs: callable
from_id: str
template: bool
push_now: bool
replace_attrs: callable

def validate(self, attrs):
_id = attrs.pop('id', None)
if _id:
def to_internal_value(self, data):
from_id = data.pop('id', None)
ret = super().to_internal_value(data)
self.from_id = from_id
return ret

def set_secret(self, attrs):
_id = self.from_id
template = attrs.pop('template', None)

if _id and template:
account_template = AccountTemplate.objects.get(id=_id)
attrs['secret'] = account_template.secret
account_template = attrs.pop('template', None)
if account_template:
self.replace_attrs(account_template, attrs)
self.push_now = attrs.pop('push_now', False)
return super().validate(attrs)
elif _id and not template:
account = Account.objects.get(id=_id)
attrs['secret'] = account.secret
return attrs

def validate(self, attrs):
attrs = super().validate(attrs)
return self.set_secret(attrs)

@staticmethod
def push_account(instance, push_now):
if not push_now:
return
push_accounts_to_assets_task.delay([str(instance.id)])

def create(self, validated_data):
push_now = validated_data.pop('push_now', None)
instance = super().create(validated_data)
self.push_account(instance, push_now)
return instance

def update(self, instance, validated_data):
# account cannot be modified
validated_data.pop('username', None)
push_now = validated_data.pop('push_now', None)
instance = super().update(instance, validated_data)
self.push_account(instance, push_now)
return instance


class AccountSerializerCreateMixin(
AccountSerializerCreateValidateMixin, BulkModelSerializer
):
template = serializers.UUIDField(
required=False, allow_null=True, write_only=True,
label=_('Account template')
class AccountSerializerCreateMixin(AccountSerializerCreateValidateMixin, BulkModelSerializer):
template = serializers.BooleanField(
default=False, label=_("Template"), write_only=True
)
push_now = serializers.BooleanField(
default=False, label=_("Push now"), write_only=True
)
has_secret = serializers.BooleanField(label=_("Has secret"), read_only=True)

@staticmethod
def validate_template(value):
try:
return AccountTemplate.objects.get(id=value)
except AccountTemplate.DoesNotExist:
raise serializers.ValidationError(_('Account template not found'))

@staticmethod
def replace_attrs(account_template: AccountTemplate, attrs: dict):
exclude_fields = [
'_state', 'org_id', 'id', 'date_created',
'date_updated'
]
template_attrs = {
k: v for k, v in account_template.__dict__.items()
if k not in exclude_fields
}
for k, v in template_attrs.items():
attrs.setdefault(k, v)

def create(self, validated_data):
instance = super().create(validated_data)
if self.push_now:
push_accounts_to_assets.delay([instance.id], [instance.asset_id])
return instance


class AccountAssetSerializer(serializers.ModelSerializer):
platform = ObjectRelatedField(read_only=True)
category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))

class Meta:
model = Asset
fields = ['id', 'name', 'address', 'platform']
fields = ['id', 'name', 'address', 'type', 'category', 'platform']

def to_internal_value(self, data):
if isinstance(data, dict):

@ -94,9 +101,10 @@ class AccountSerializer(AccountSerializerCreateMixin, BaseAccountSerializer):

class Meta(BaseAccountSerializer.Meta):
model = Account
fields = BaseAccountSerializer.Meta.fields \
+ ['su_from', 'version', 'asset'] \
+ ['template', 'push_now', 'source']
fields = BaseAccountSerializer.Meta.fields + [
'su_from', 'asset', 'template', 'version',
'push_now', 'source', 'connectivity',
]
extra_kwargs = {
**BaseAccountSerializer.Meta.extra_kwargs,
'name': {'required': False, 'allow_null': True},

@ -110,7 +118,8 @@ class AccountSerializer(AccountSerializerCreateMixin, BaseAccountSerializer):
@classmethod
def setup_eager_loading(cls, queryset):
""" Perform necessary eager loading of data. """
queryset = queryset.prefetch_related('asset', 'asset__platform')
queryset = queryset \
.prefetch_related('asset', 'asset__platform')
return queryset

@ -133,6 +142,11 @@ class AccountHistorySerializer(serializers.ModelSerializer):
class AccountTaskSerializer(serializers.Serializer):
ACTION_CHOICES = (
('test', 'test'),
('verify', 'verify'),
('push', 'push'),
)
action = serializers.ChoiceField(choices=ACTION_CHOICES, write_only=True)
accounts = serializers.PrimaryKeyRelatedField(
queryset=Account.objects, required=False, allow_empty=True, many=True
)
task = serializers.CharField(read_only=True)

@ -3,13 +3,12 @@
from django.utils.translation import ugettext as _
from rest_framework import serializers

from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ops.mixin import PeriodTaskSerializerMixin
from common.utils import get_logger
from accounts.models import AccountBackupAutomation, AccountBackupExecution
from common.const.choices import Trigger
from common.serializers.fields import LabeledChoiceField

from accounts.models import AccountBackupAutomation, AccountBackupExecution
from common.utils import get_logger
from ops.mixin import PeriodTaskSerializerMixin
from orgs.mixins.serializers import BulkOrgResourceModelSerializer

logger = get_logger(__file__)

@ -20,18 +19,22 @@ class AccountBackupSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSer
class Meta:
model = AccountBackupAutomation
read_only_fields = [
'date_created', 'date_updated', 'created_by', 'periodic_display', 'executed_amount'
'date_created', 'date_updated', 'created_by',
'periodic_display', 'executed_amount'
]
fields = read_only_fields + [
'id', 'name', 'is_periodic', 'interval', 'crontab', 'comment', 'recipients', 'types'
'id', 'name', 'is_periodic', 'interval', 'crontab',
'comment', 'recipients', 'types'
]
extra_kwargs = {
'name': {'required': True},
'periodic_display': {'label': _('Periodic perform')},
'executed_amount': {'label': _('Executed amount')},
'recipients': {'label': _('Recipient'), 'help_text': _(
'Currently only mail sending is supported'
)}
'recipients': {
'label': _('Recipient'),
'help_text': _('Currently only mail sending is supported')
},
'types': {'label': _('Asset type')}
}

@ -41,7 +44,7 @@ class AccountBackupPlanExecutionSerializer(serializers.ModelSerializer):
class Meta:
model = AccountBackupExecution
read_only_fields = [
'id', 'date_start', 'timedelta', 'plan_snapshot', 'trigger', 'reason',
'is_success', 'org_id', 'recipients'
'id', 'date_start', 'timedelta', 'plan_snapshot',
'trigger', 'reason', 'is_success', 'org_id', 'recipients'
]
fields = read_only_fields + ['plan']

@ -24,30 +24,28 @@ class AuthValidateMixin(serializers.Serializer):
write_only=True, label=_('Key password')
)

@property
def initial_secret_type(self):
secret_type = self.initial_data.get('secret_type')
return secret_type

def validate_secret(self, secret):
@staticmethod
def handle_secret(secret, secret_type, passphrase=None):
if not secret:
return ''
secret_type = self.initial_secret_type
if secret_type == SecretType.PASSWORD:
validate_password_for_ansible(secret)
return secret
elif secret_type == SecretType.SSH_KEY:
passphrase = self.initial_data.get('passphrase')
passphrase = passphrase if passphrase else None
return validate_ssh_key(secret, passphrase)
else:
return secret

@staticmethod
def clean_auth_fields(validated_data):
def clean_auth_fields(self, validated_data):
secret_type = validated_data.get('secret_type')
passphrase = validated_data.get('passphrase')
secret = validated_data.pop('secret', None)
self.handle_secret(secret, secret_type, passphrase)
validated_data['secret'] = secret
for field in ('secret',):
value = validated_data.get(field)
if value is None:
if not value:
validated_data.pop(field, None)
validated_data.pop('passphrase', None)

@ -68,14 +66,15 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
fields_mini = ['id', 'name', 'username']
fields_small = fields_mini + [
'secret_type', 'secret', 'has_secret', 'passphrase',
'privileged', 'is_active', 'specific',
'privileged', 'is_active', 'spec_info',
]
fields_other = ['created_by', 'date_created', 'date_updated', 'comment']
fields = fields_small + fields_other
read_only_fields = [
'has_secret', 'specific',
'has_secret', 'spec_info',
'date_verified', 'created_by', 'date_created',
]
extra_kwargs = {
'specific': {'label': _('Specific')},
'name': {'required': True},
'spec_info': {'label': _('Spec info')},
}

@ -0,0 +1,23 @@
from django.utils.translation import ugettext_lazy as _

from accounts.models import GatheredAccount
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .account import AccountAssetSerializer
from .base import BaseAccountSerializer


class GatheredAccountSerializer(BulkOrgResourceModelSerializer):
asset = AccountAssetSerializer(label=_('Asset'))

class Meta(BaseAccountSerializer.Meta):
model = GatheredAccount
fields = [
'id', 'present', 'asset', 'username',
'date_updated', 'address_last_login', 'date_last_login'
]

@classmethod
def setup_eager_loading(cls, queryset):
""" Perform necessary eager loading of data. """
queryset = queryset.prefetch_related('asset', 'asset__platform')
return queryset

@ -1,14 +1,14 @@
|
|||
from django.utils.translation import ugettext as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from ops.mixin import PeriodTaskSerializerMixin
|
||||
from accounts.models import AutomationExecution
|
||||
from assets.const import AutomationTypes
|
||||
from assets.models import Asset, Node, BaseAutomation
|
||||
from accounts.models import AutomationExecution
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from common.utils import get_logger
|
||||
from common.const.choices import Trigger
|
||||
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
|
||||
from common.utils import get_logger
|
||||
from ops.mixin import PeriodTaskSerializerMixin
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
@ -37,6 +37,17 @@ class BaseAutomationSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSe
|
|||
'executed_amount': {'label': _('Executed amount')},
|
||||
}
|
||||
|
||||
def validate_name(self, name):
|
||||
if self.instance and self.instance.name == name:
|
||||
return name
|
||||
if BaseAutomation.objects.filter(name=name, type=self.model_type).exists():
|
||||
raise serializers.ValidationError(_('Name already exists'))
|
||||
return name
|
||||
|
||||
@property
|
||||
def model_type(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class AutomationExecutionSerializer(serializers.ModelSerializer):
|
||||
snapshot = serializers.SerializerMethodField(label=_('Automation snapshot'))
|
||||
|
|
|
@ -4,13 +4,13 @@ from django.utils.translation import ugettext as _
|
|||
from rest_framework import serializers
|
||||
|
||||
from accounts.const import (
|
||||
DEFAULT_PASSWORD_RULES, SecretType, SecretStrategy, SSHKeyStrategy
|
||||
AutomationTypes, DEFAULT_PASSWORD_RULES,
|
||||
SecretType, SecretStrategy, SSHKeyStrategy
|
||||
)
|
||||
from accounts.models import (
|
||||
Account, ChangeSecretAutomation,
|
||||
ChangeSecretRecord
|
||||
ChangeSecretRecord, AutomationExecution
|
||||
)
|
||||
from accounts.models import AutomationExecution
|
||||
from accounts.serializers import AuthValidateMixin
|
||||
from assets.models import Asset
|
||||
from common.serializers.fields import LabeledChoiceField, ObjectRelatedField
|
||||
|
@ -53,13 +53,17 @@ class ChangeSecretAutomationSerializer(AuthValidateMixin, BaseAutomationSerializ
|
|||
'ssh_key_change_strategy', 'passphrase', 'recipients',
|
||||
]
|
||||
extra_kwargs = {**BaseAutomationSerializer.Meta.extra_kwargs, **{
|
||||
'accounts': {'required': True},
|
||||
'recipients': {'label': _('Recipient'), 'help_text': _(
|
||||
"Currently only mail sending is supported"
|
||||
)},
|
||||
}}
|
||||
@property
|
||||
def model_type(self):
|
||||
return AutomationTypes.change_secret
|
||||
|
||||
def validate_password_rules(self, password_rules):
|
||||
secret_type = self.initial_secret_type
|
||||
secret_type = self.initial_data['secret_type']
|
||||
if secret_type != SecretType.PASSWORD:
|
||||
return password_rules
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.models import GatherAccountsAutomation
|
||||
from common.utils import get_logger
|
||||
|
||||
|
@ -20,3 +20,7 @@ class GatherAccountAutomationSerializer(BaseAutomationSerializer):
|
|||
fields = BaseAutomationSerializer.Meta.fields + read_only_fields
|
||||
|
||||
extra_kwargs = BaseAutomationSerializer.Meta.extra_kwargs
|
||||
|
||||
@property
|
||||
def model_type(self):
|
||||
return AutomationTypes.gather_accounts
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import copy
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.models import PushAccountAutomation
|
||||
from .change_secret import (
|
||||
ChangeSecretAutomationSerializer, ChangeSecretUpdateAssetSerializer,
|
||||
|
@ -7,58 +7,16 @@ from .change_secret import (
|
|||
|
||||
|
||||
class PushAccountAutomationSerializer(ChangeSecretAutomationSerializer):
|
||||
# dynamic_username = serializers.BooleanField(label=_('Dynamic username'), default=False)
|
||||
# triggers = TreeChoicesField(
|
||||
# choice_cls=TriggerChoice, label=_('Triggers'),
|
||||
# default=TriggerChoice.all(),
|
||||
# )
|
||||
# action = LabeledChoiceField(
|
||||
# choices=PushAccountActionChoice.choices, label=_('Action'),
|
||||
# default=PushAccountActionChoice.create_and_push
|
||||
# )
|
||||
|
||||
class Meta(ChangeSecretAutomationSerializer.Meta):
|
||||
model = PushAccountAutomation
|
||||
fields = copy.copy(ChangeSecretAutomationSerializer.Meta.fields)
|
||||
fields.remove('recipients')
|
||||
fields = [
|
||||
n for n in ChangeSecretAutomationSerializer.Meta.fields
|
||||
if n not in ['recipients']
|
||||
]
|
||||
|
||||
# fields = ChangeSecretAutomationSerializer.Meta.fields + [
|
||||
# 'dynamic_username', 'triggers', 'action'
|
||||
# ]
|
||||
|
||||
# def validate_username(self, value):
|
||||
# if self.initial_data.get('dynamic_username'):
|
||||
# value = '@USER'
|
||||
# queryset = self.Meta.model.objects.filter(username=value)
|
||||
# if self.instance:
|
||||
# queryset = queryset.exclude(id=self.instance.id)
|
||||
# if queryset.exists():
|
||||
# raise serializers.ValidationError(_('Username already exists'))
|
||||
# return value
|
||||
#
|
||||
# def validate_dynamic_username(self, value):
|
||||
# if not value:
|
||||
# return value
|
||||
# queryset = self.Meta.model.objects.filter(username='@USER')
|
||||
# if self.instance:
|
||||
# queryset = queryset.exclude(id=self.instance.id)
|
||||
# if queryset.exists():
|
||||
# raise serializers.ValidationError(_('Dynamic username already exists'))
|
||||
# return value
|
||||
#
|
||||
# def validate_triggers(self, value):
|
||||
# # Now triggers readonly, set all
|
||||
# return TriggerChoice.all()
|
||||
#
|
||||
# def get_field_names(self, declared_fields, info):
|
||||
# fields = super().get_field_names(declared_fields, info)
|
||||
# excludes = [
|
||||
# 'recipients', 'is_periodic', 'interval', 'crontab',
|
||||
# 'periodic_display', 'assets', 'nodes'
|
||||
# ]
|
||||
# fields = [f for f in fields if f not in excludes]
|
||||
# fields[fields.index('accounts')] = 'username'
|
||||
# return fields
|
||||
@property
|
||||
def model_type(self):
|
||||
return AutomationTypes.push_account
|
||||
|
||||
|
||||
class PushAccountUpdateAssetSerializer(ChangeSecretUpdateAssetSerializer):
|
||||
|
|
|
@ -1,26 +1,15 @@
from django.db.models.signals import pre_save, post_save
from django.db.models.signals import pre_save
from django.dispatch import receiver

from assets.models import Asset
from common.decorator import on_transaction_commit
from common.utils import get_logger
from .automations.push_account.manager import PushAccountManager
from .models import Account

logger = get_logger(__name__)


@receiver(pre_save, sender=Account)
def on_account_pre_create(sender, instance, **kwargs):
# Bump the version number
instance.version += 1
# Safe even under the root organization
instance.org_id = instance.asset.org_id


@receiver(post_save, sender=Asset)
@on_transaction_commit
def on_asset_create(sender, instance, created=False, **kwargs):
if not created:
return
# PushAccountManager.trigger_by_asset_create(instance)
def on_account_pre_save(sender, instance, created=False, **kwargs):
if created:
instance.version = 1
else:
instance.version = instance.history.count()

@ -1,15 +1,30 @@
|
|||
from celery import shared_task
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.utils import tmp_to_root_org, tmp_to_org
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from accounts.const import AutomationTypes
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from orgs.utils import tmp_to_org, tmp_to_root_org
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task(queue='ansible', verbose_name=_('Account execute automation'))
|
||||
def execute_automation(pid, trigger, tp):
|
||||
def task_activity_callback(self, pid, trigger, tp):
|
||||
model = AutomationTypes.get_type_model(tp)
|
||||
with tmp_to_root_org():
|
||||
instance = get_object_or_none(model, pk=pid)
|
||||
if not instance:
|
||||
return
|
||||
if not instance.latest_execution:
|
||||
return
|
||||
resource_ids = instance.latest_execution.get_all_asset_ids()
|
||||
return resource_ids, instance.org_id
|
||||
|
||||
|
||||
@shared_task(
|
||||
queue='ansible', verbose_name=_('Account execute automation'),
|
||||
activity_callback=task_activity_callback
|
||||
)
|
||||
def execute_account_automation_task(pid, trigger, tp):
|
||||
model = AutomationTypes.get_type_model(tp)
|
||||
with tmp_to_root_org():
|
||||
instance = get_object_or_none(model, pk=pid)
|
||||
|
|
|
@ -3,15 +3,28 @@
|
|||
from celery import shared_task
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from accounts.models import AccountBackupAutomation
|
||||
from common.utils import get_object_or_none, get_logger
|
||||
from orgs.utils import tmp_to_org, tmp_to_root_org
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task(verbose_name=_('Execute account backup plan'))
|
||||
def execute_account_backup_plan(pid, trigger):
|
||||
def task_activity_callback(self, pid, trigger):
|
||||
from accounts.models import AccountBackupAutomation
|
||||
with tmp_to_root_org():
|
||||
plan = get_object_or_none(AccountBackupAutomation, pk=pid)
|
||||
if not plan:
|
||||
return
|
||||
if not plan.latest_execution:
|
||||
return
|
||||
resource_ids = plan.latest_execution.backup_accounts
|
||||
org_id = plan.org_id
|
||||
return resource_ids, org_id
|
||||
|
||||
|
||||
@shared_task(verbose_name=_('Execute account backup plan'), activity_callback=task_activity_callback)
|
||||
def execute_account_backup_task(pid, trigger):
|
||||
from accounts.models import AccountBackupAutomation
|
||||
with tmp_to_root_org():
|
||||
plan = get_object_or_none(AccountBackupAutomation, pk=pid)
|
||||
if not plan:
|
||||
|
|
|
@ -1,21 +1,17 @@
|
|||
|
||||
import uuid
|
||||
|
||||
from assets.tasks.common import generate_data
|
||||
from assets.tasks.common import generate_automation_execution_data
|
||||
from common.const.choices import Trigger
|
||||
|
||||
|
||||
def automation_execute_start(task_name, tp, child_snapshot=None):
|
||||
def quickstart_automation_by_snapshot(task_name, tp, task_snapshot=None):
|
||||
from accounts.models import AutomationExecution
|
||||
data = generate_data(task_name, tp, child_snapshot)
|
||||
data = generate_automation_execution_data(task_name, tp, task_snapshot)
|
||||
|
||||
pk = data['id']
|
||||
if AutomationExecution.objects.filter(id=pk).exists():
|
||||
data['id'] = str(uuid.uuid4())
|
||||
|
||||
while True:
|
||||
try:
|
||||
_id = data['id']
|
||||
AutomationExecution.objects.get(id=_id)
|
||||
data['id'] = str(uuid.uuid4())
|
||||
except AutomationExecution.DoesNotExist:
|
||||
break
|
||||
execution = AutomationExecution.objects.create(
|
||||
trigger=Trigger.manual, **data
|
||||
)
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.translation import gettext_noop
|
||||
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.tasks.common import quickstart_automation_by_snapshot
|
||||
from assets.models import Node
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.tasks.common import automation_execute_start
|
||||
|
||||
__all__ = ['gather_asset_accounts']
|
||||
__all__ = ['gather_asset_accounts_task']
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
|
@ -18,15 +18,18 @@ def gather_asset_accounts_util(nodes, task_name):
|
|||
from accounts.models import GatherAccountsAutomation
|
||||
task_name = GatherAccountsAutomation.generate_unique_name(task_name)
|
||||
|
||||
child_snapshot = {
|
||||
task_snapshot = {
|
||||
'nodes': [str(node.id) for node in nodes],
|
||||
}
|
||||
tp = AutomationTypes.verify_account
|
||||
automation_execute_start(task_name, tp, child_snapshot)
|
||||
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
|
||||
|
||||
|
||||
@shared_task(queue="ansible", verbose_name=_('Gather asset accounts'))
|
||||
def gather_asset_accounts(node_ids, task_name=None):
|
||||
@shared_task(
|
||||
queue="ansible", verbose_name=_('Gather asset accounts'),
|
||||
activity_callback=lambda self, node_ids, task_name=None: (node_ids, None)
|
||||
)
|
||||
def gather_asset_accounts_task(node_ids, task_name=None):
|
||||
if task_name is None:
|
||||
task_name = gettext_noop("Gather assets accounts")
|
||||
|
||||
|
|
|
@ -1,43 +1,34 @@
|
|||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop, ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.tasks.common import automation_execute_start
|
||||
from accounts.tasks.common import quickstart_automation_by_snapshot
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = [
|
||||
'push_accounts_to_assets',
|
||||
'push_accounts_to_assets_task',
|
||||
]
|
||||
|
||||
|
||||
def push_util(account, assets, task_name):
|
||||
child_snapshot = {
|
||||
'secret': account.secret,
|
||||
'secret_type': account.secret_type,
|
||||
'accounts': [account.username],
|
||||
'assets': [str(asset.id) for asset in assets],
|
||||
}
|
||||
tp = AutomationTypes.push_account
|
||||
automation_execute_start(task_name, tp, child_snapshot)
|
||||
|
||||
|
||||
@org_aware_func("assets")
|
||||
def push_accounts_to_assets_util(accounts, assets):
|
||||
@shared_task(
|
||||
queue="ansible", verbose_name=_('Push accounts to assets'),
|
||||
activity_callback=lambda self, account_ids, asset_ids: (account_ids, None)
|
||||
)
|
||||
def push_accounts_to_assets_task(account_ids):
|
||||
from accounts.models import PushAccountAutomation
|
||||
|
||||
task_name = gettext_noop("Push accounts to assets")
|
||||
task_name = PushAccountAutomation.generate_unique_name(task_name)
|
||||
for account in accounts:
|
||||
push_util(account, assets, task_name)
|
||||
|
||||
|
||||
@shared_task(queue="ansible", verbose_name=_('Push accounts to assets'))
|
||||
def push_accounts_to_assets(account_ids, asset_ids):
|
||||
from assets.models import Asset
|
||||
from accounts.models import Account
|
||||
|
||||
assets = Asset.objects.filter(id__in=asset_ids)
|
||||
accounts = Account.objects.filter(id__in=account_ids)
|
||||
return push_accounts_to_assets_util(accounts, assets)
|
||||
task_name = gettext_noop("Push accounts to assets")
|
||||
task_name = PushAccountAutomation.generate_unique_name(task_name)
|
||||
|
||||
for account in accounts:
|
||||
task_snapshot = {
|
||||
'secret': account.secret,
|
||||
'secret_type': account.secret_type,
|
||||
'accounts': [account.username],
|
||||
'assets': [str(account.asset_id)],
|
||||
}
|
||||
tp = AutomationTypes.push_account
|
||||
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
from celery import shared_task
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.translation import gettext_noop
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from assets.const import GATEWAY_NAME
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.tasks.common import automation_execute_start
|
||||
from accounts.tasks.common import quickstart_automation_by_snapshot
|
||||
from assets.const import GATEWAY_NAME
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func
|
||||
|
||||
logger = get_logger(__name__)
|
||||
__all__ = [
|
||||
'verify_accounts_connectivity'
|
||||
'verify_accounts_connectivity_task'
|
||||
]
|
||||
|
||||
|
||||
|
@ -18,32 +18,40 @@ def verify_connectivity_util(assets, tp, accounts, task_name):
|
|||
if not assets or not accounts:
|
||||
return
|
||||
account_usernames = list(accounts.values_list('username', flat=True))
|
||||
child_snapshot = {
|
||||
task_snapshot = {
|
||||
'accounts': account_usernames,
|
||||
'assets': [str(asset.id) for asset in assets],
|
||||
}
|
||||
automation_execute_start(task_name, tp, child_snapshot)
|
||||
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
|
||||
|
||||
|
||||
@org_aware_func("assets")
|
||||
def verify_accounts_connectivity_util(accounts, assets, task_name):
|
||||
gateway_assets = assets.filter(platform__name=GATEWAY_NAME)
|
||||
verify_connectivity_util(
|
||||
gateway_assets, AutomationTypes.verify_gateway_account, accounts, task_name
|
||||
)
|
||||
|
||||
non_gateway_assets = assets.exclude(platform__name=GATEWAY_NAME)
|
||||
verify_connectivity_util(
|
||||
non_gateway_assets, AutomationTypes.verify_account, accounts, task_name
|
||||
)
|
||||
|
||||
|
||||
@shared_task(queue="ansible", verbose_name=_('Verify asset account availability'))
|
||||
def verify_accounts_connectivity(account_ids, asset_ids):
|
||||
def verify_accounts_connectivity_util(accounts, task_name):
|
||||
from assets.models import Asset
|
||||
from accounts.models import Account, VerifyAccountAutomation
|
||||
|
||||
asset_ids = [a.asset_id for a in accounts]
|
||||
assets = Asset.objects.filter(id__in=asset_ids)
|
||||
|
||||
gateways = assets.filter(platform__name=GATEWAY_NAME)
|
||||
verify_connectivity_util(
|
||||
gateways, AutomationTypes.verify_gateway_account,
|
||||
accounts, task_name
|
||||
)
|
||||
|
||||
common_assets = assets.exclude(platform__name=GATEWAY_NAME)
|
||||
verify_connectivity_util(
|
||||
common_assets, AutomationTypes.verify_account,
|
||||
accounts, task_name
|
||||
)
|
||||
|
||||
|
||||
@shared_task(
|
||||
queue="ansible", verbose_name=_('Verify asset account availability'),
|
||||
activity_callback=lambda self, account_ids, asset_ids: (account_ids, None)
|
||||
)
|
||||
def verify_accounts_connectivity_task(account_ids):
|
||||
from accounts.models import Account, VerifyAccountAutomation
|
||||
accounts = Account.objects.filter(id__in=account_ids)
|
||||
task_name = gettext_noop("Verify accounts connectivity")
|
||||
task_name = VerifyAccountAutomation.generate_unique_name(task_name)
|
||||
return verify_accounts_connectivity_util(accounts, assets, task_name)
|
||||
return verify_accounts_connectivity_util(accounts, task_name)
|
||||
|
|
|
@ -9,6 +9,7 @@ app_name = 'accounts'
|
|||
router = BulkRouter()
|
||||
|
||||
router.register(r'accounts', api.AccountViewSet, 'account')
|
||||
router.register(r'gathered-accounts', api.GatheredAccountViewSet, 'gathered-account')
|
||||
router.register(r'account-secrets', api.AccountSecretsViewSet, 'account-secret')
|
||||
router.register(r'account-templates', api.AccountTemplateViewSet, 'account-template')
|
||||
router.register(r'account-template-secrets', api.AccountTemplateSecretsViewSet, 'account-template-secret')
|
||||
|
@ -24,17 +25,22 @@ router.register(r'push-account-executions', api.PushAccountExecutionViewSet, 'pu
|
|||
router.register(r'push-account-records', api.PushAccountRecordViewSet, 'push-account-record')
|
||||
|
||||
urlpatterns = [
|
||||
path('accounts/tasks/', api.AccountTaskCreateAPI.as_view(), name='account-task-create'),
|
||||
path('account-secrets/<uuid:pk>/histories/', api.AccountHistoriesSecretAPI.as_view(), name='account-secret-history'),
|
||||
path('accounts/tasks/', api.AccountsTaskCreateAPI.as_view(), name='account-task-create'),
|
||||
path('account-secrets/<uuid:pk>/histories/', api.AccountHistoriesSecretAPI.as_view(),
|
||||
name='account-secret-history'),
|
||||
|
||||
path('change-secret/<uuid:pk>/asset/remove/', api.ChangSecretRemoveAssetApi.as_view(), name='change-secret-remove-asset'),
|
||||
path('change-secret/<uuid:pk>/asset/remove/', api.ChangSecretRemoveAssetApi.as_view(),
|
||||
name='change-secret-remove-asset'),
|
||||
path('change-secret/<uuid:pk>/asset/add/', api.ChangSecretAddAssetApi.as_view(), name='change-secret-add-asset'),
|
||||
path('change-secret/<uuid:pk>/nodes/', api.ChangSecretNodeAddRemoveApi.as_view(), name='change-secret-add-or-remove-node'),
|
||||
path('change-secret/<uuid:pk>/nodes/', api.ChangSecretNodeAddRemoveApi.as_view(),
|
||||
name='change-secret-add-or-remove-node'),
|
||||
path('change-secret/<uuid:pk>/assets/', api.ChangSecretAssetsListApi.as_view(), name='change-secret-assets'),
|
||||
|
||||
path('push-account/<uuid:pk>/asset/remove/', api.PushAccountRemoveAssetApi.as_view(), name='push-account-remove-asset'),
|
||||
path('push-account/<uuid:pk>/asset/remove/', api.PushAccountRemoveAssetApi.as_view(),
|
||||
name='push-account-remove-asset'),
|
||||
path('push-accountt/<uuid:pk>/asset/add/', api.PushAccountAddAssetApi.as_view(), name='push-account-add-asset'),
|
||||
path('push-account/<uuid:pk>/nodes/', api.PushAccountNodeAddRemoveApi.as_view(), name='push-account-add-or-remove-node'),
|
||||
path('push-account/<uuid:pk>/nodes/', api.PushAccountNodeAddRemoveApi.as_view(),
|
||||
name='push-account-add-or-remove-node'),
|
||||
path('push-account/<uuid:pk>/assets/', api.PushAccountAssetsListApi.as_view(), name='push-account-assets'),
|
||||
]
|
||||
|
||||
|
|
|
@ -20,7 +20,8 @@ class SecretGenerator:
|
|||
return private_key
|
||||
|
||||
def generate_password(self):
|
||||
length = int(self.password_rules.get('length', DEFAULT_PASSWORD_RULES['length']))
|
||||
length = int(self.password_rules.get('length', 0))
|
||||
length = length if length else DEFAULT_PASSWORD_RULES['length']
|
||||
return random_string(length, special_char=True)
|
||||
|
||||
def get_secret(self):
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from common.utils import reverse
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from .. import models, serializers
|
||||
|
||||
|
@ -36,4 +34,4 @@ class CommandFilterACLViewSet(OrgBulkModelViewSet):
|
|||
}
|
||||
ticket = serializer.cmd_filter_acl.create_command_review_ticket(**data)
|
||||
info = ticket.get_extra_info_of_review(user=request.user)
|
||||
return info
|
||||
return Response(data=info)
|
||||
|
|
|
@ -20,14 +20,14 @@ class Migration(migrations.Migration):
|
|||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='commandfilteracl',
|
||||
options={'ordering': ('priority', 'name'), 'verbose_name': 'Command acl'},
|
||||
options={'ordering': ('priority', 'date_updated', 'name'), 'verbose_name': 'Command acl'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='loginacl',
|
||||
options={'ordering': ('priority', 'name'), 'verbose_name': 'Login acl'},
|
||||
options={'ordering': ('priority', 'date_updated', 'name'), 'verbose_name': 'Login acl'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='loginassetacl',
|
||||
options={'ordering': ('priority', 'name'), 'verbose_name': 'Login asset acl'},
|
||||
options={'ordering': ('priority', 'date_updated', 'name'), 'verbose_name': 'Login asset acl'},
|
||||
),
|
||||
]
|
||||
|
|
|
@ -5,7 +5,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
|
||||
from common.db.models import JMSBaseModel
|
||||
from common.utils import contains_ip
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
|
||||
__all__ = [
|
||||
'ACLManager',
|
||||
|
@ -67,6 +67,10 @@ class ACLManager(models.Manager):
|
|||
return self.get_queryset().valid()
|
||||
|
||||
|
||||
class OrgACLManager(OrgManager, ACLManager):
|
||||
pass
|
||||
|
||||
|
||||
class BaseACL(JMSBaseModel):
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
priority = models.IntegerField(
|
||||
|
@ -82,7 +86,7 @@ class BaseACL(JMSBaseModel):
|
|||
objects = ACLManager.from_queryset(BaseACLQuerySet)()
|
||||
|
||||
class Meta:
|
||||
ordering = ('priority', 'name')
|
||||
ordering = ('priority', 'date_updated', 'name')
|
||||
abstract = True
|
||||
|
||||
def is_action(self, action):
|
||||
|
@ -97,7 +101,7 @@ class UserAssetAccountBaseACL(BaseACL, OrgModelMixin):
|
|||
# username_group
|
||||
accounts = models.JSONField(verbose_name=_('Account'))
|
||||
|
||||
objects = ACLManager.from_queryset(UserAssetAccountACLQuerySet)()
|
||||
objects = OrgACLManager.from_queryset(UserAssetAccountACLQuerySet)()
|
||||
|
||||
class Meta(BaseACL.Meta):
|
||||
unique_together = ('name', 'org_id')
|
||||
|
@ -109,14 +113,14 @@ class UserAssetAccountBaseACL(BaseACL, OrgModelMixin):
|
|||
org_id = None
|
||||
if user:
|
||||
queryset = queryset.filter_user(user.username)
|
||||
if asset:
|
||||
org_id = asset.org_id
|
||||
queryset = queryset.filter_asset(asset.name, asset.address)
|
||||
if account:
|
||||
org_id = account.org_id
|
||||
queryset = queryset.filter_account(account.username)
|
||||
if account_username:
|
||||
queryset = queryset.filter_account(username=account_username)
|
||||
if asset:
|
||||
org_id = asset.org_id
|
||||
queryset = queryset.filter_asset(asset.name, asset.address)
|
||||
if org_id:
|
||||
kwargs['org_id'] = org_id
|
||||
if kwargs:
|
||||
|
|
|
@ -22,7 +22,7 @@ class LoginACLSerializer(BulkModelSerializer):
|
|||
reviewers = ObjectRelatedField(
|
||||
queryset=User.objects, label=_("Reviewers"), many=True, required=False
|
||||
)
|
||||
action = LabeledChoiceField(choices=LoginACL.ActionChoices.choices)
|
||||
action = LabeledChoiceField(choices=LoginACL.ActionChoices.choices, label=_('Action'))
|
||||
reviewers_amount = serializers.IntegerField(
|
||||
read_only=True, source="reviewers.count", label=_("Reviewers amount")
|
||||
)
|
||||
|
@ -52,10 +52,10 @@ class LoginACLSerializer(BulkModelSerializer):
|
|||
action = self.fields.get("action")
|
||||
if not action:
|
||||
return
|
||||
choices = action._choices
|
||||
choices = action.choices
|
||||
if not has_valid_xpack_license():
|
||||
choices.pop(LoginACL.ActionChoices.review, None)
|
||||
action._choices = choices
|
||||
action.choices = choices
|
||||
|
||||
def get_rules_serializer(self):
|
||||
return RuleSerializer()
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from ..application_category import DBSerializer
|
||||
|
||||
__all__ = ['ClickHouseSerializer']
|
||||
|
||||
|
||||
class ClickHouseSerializer(DBSerializer):
|
||||
port = serializers.IntegerField(
|
||||
default=9000, label=_('Port'), allow_null=True,
|
||||
help_text=_(
|
||||
'Typically, the port is 9000,'
|
||||
'the HTTP interface and the native interface use different ports'
|
||||
),
|
||||
)
|
|
@ -2,21 +2,19 @@
|
|||
#
|
||||
import django_filters
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.translation import gettext as _
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from accounts.tasks import push_accounts_to_assets, verify_accounts_connectivity
|
||||
from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task
|
||||
from assets import serializers
|
||||
from assets.exceptions import NotSupportedTemporarilyError
|
||||
from assets.filters import IpInFilterBackend, LabelFilterBackend, NodeFilterBackend
|
||||
from assets.models import Asset, Gateway
|
||||
from assets.tasks import (
|
||||
test_assets_connectivity_manual,
|
||||
update_assets_hardware_info_manual
|
||||
)
|
||||
from assets.tasks import test_assets_connectivity_manual, update_assets_hardware_info_manual
|
||||
from common.api import SuggestionMixin
|
||||
from common.drf.filters import BaseFilterSet
|
||||
from common.utils import get_logger
|
||||
from common.utils import get_logger, is_uuid
|
||||
from orgs.mixins import generics
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from ..mixin import NodeFilterMixin
|
||||
|
@ -29,16 +27,38 @@ __all__ = [
|
|||
|
||||
|
||||
class AssetFilterSet(BaseFilterSet):
|
||||
labels = django_filters.CharFilter(method='filter_labels')
|
||||
platform = django_filters.CharFilter(method='filter_platform')
|
||||
domain = django_filters.CharFilter(method='filter_domain')
|
||||
type = django_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
|
||||
category = django_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
|
||||
platform = django_filters.CharFilter(method='filter_platform')
|
||||
labels = django_filters.CharFilter(method='filter_labels')
|
||||
domain_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__domain_enabled", lookup_expr="exact"
|
||||
)
|
||||
ping_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__ping_enabled", lookup_expr="exact"
|
||||
)
|
||||
gather_facts_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__gather_facts_enabled", lookup_expr="exact"
|
||||
)
|
||||
change_secret_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__change_secret_enabled", lookup_expr="exact"
|
||||
)
|
||||
push_account_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__push_account_enabled", lookup_expr="exact"
|
||||
)
|
||||
verify_account_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__verify_account_enabled", lookup_expr="exact"
|
||||
)
|
||||
gather_accounts_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__gather_accounts_enabled", lookup_expr="exact"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
fields = [
|
||||
"id", "name", "address", "is_active", "labels",
|
||||
"type", "category", "platform"
|
||||
"type", "category", "platform",
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
|
@ -48,13 +68,21 @@ class AssetFilterSet(BaseFilterSet):
|
|||
else:
|
||||
return queryset.filter(platform__name=value)
|
||||
|
||||
@staticmethod
|
||||
def filter_domain(queryset, name, value):
|
||||
if is_uuid(value):
|
||||
return queryset.filter(domain_id=value)
|
||||
else:
|
||||
return queryset.filter(domain__name__contains=value)
|
||||
|
||||
@staticmethod
|
||||
def filter_labels(queryset, name, value):
|
||||
if ':' in value:
|
||||
n, v = value.split(':', 1)
|
||||
queryset = queryset.filter(labels__name=n, labels__value=v)
|
||||
else:
|
||||
queryset = queryset.filter(Q(labels__name=value) | Q(labels__value=value))
|
||||
q = Q(labels__name__contains=value) | Q(labels__value__contains=value)
|
||||
queryset = queryset.filter(q)
|
||||
return queryset
|
||||
|
||||
|
||||
|
@ -65,18 +93,19 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
model = Asset
|
||||
filterset_class = AssetFilterSet
|
||||
search_fields = ("name", "address")
|
||||
ordering_fields = ("name", "address", "connectivity")
|
||||
ordering = ("name", "connectivity")
|
||||
serializer_classes = (
|
||||
("default", serializers.AssetSerializer),
|
||||
("platform", serializers.PlatformSerializer),
|
||||
("suggestion", serializers.MiniAssetSerializer),
|
||||
("gateways", serializers.GatewaySerializer),
|
||||
("spec_info", serializers.SpecSerializer)
|
||||
)
|
||||
rbac_perms = (
|
||||
("match", "assets.match_asset"),
|
||||
("platform", "assets.view_platform"),
|
||||
("gateways", "assets.view_gateway"),
|
||||
("spec_info", "assets.view_asset"),
|
||||
)
|
||||
extra_filter_backends = [LabelFilterBackend, IpInFilterBackend, NodeFilterBackend]
|
||||
|
||||
|
@ -94,6 +123,11 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
serializer = super().get_serializer(instance=asset.platform)
|
||||
return Response(serializer.data)
|
||||
|
||||
@action(methods=["GET"], detail=True, url_path="spec-info")
|
||||
def spec_info(self, *args, **kwargs):
|
||||
asset = super().get_object()
|
||||
return Response(asset.spec_info)
|
||||
|
||||
@action(methods=["GET"], detail=True, url_path="gateways")
|
||||
def gateways(self, *args, **kwargs):
|
||||
asset = self.get_object()
|
||||
|
@ -103,16 +137,26 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
gateways = asset.domain.gateways
|
||||
return self.get_paginated_response_from_queryset(gateways)
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
if request.path.find('/api/v1/assets/assets/') > -1:
|
||||
error = _('Cannot create asset directly, you should create a host or other')
|
||||
return Response({'error': error}, status=400)
|
||||
return super().create(request, *args, **kwargs)
|
||||
|
||||
|
||||
class AssetsTaskMixin:
|
||||
def perform_assets_task(self, serializer):
|
||||
data = serializer.validated_data
|
||||
assets = data.get("assets", [])
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
|
||||
if data["action"] == "refresh":
|
||||
task = update_assets_hardware_info_manual.delay(asset_ids)
|
||||
task = update_assets_hardware_info_manual(assets)
|
||||
else:
|
||||
task = test_assets_connectivity_manual.delay(asset_ids)
|
||||
asset = assets[0]
|
||||
if not asset.auto_info['ansible_enabled'] or \
|
||||
not asset.auto_info['ping_enabled']:
|
||||
raise NotSupportedTemporarilyError()
|
||||
task = test_assets_connectivity_manual(assets)
|
||||
return task
|
||||
|
||||
def perform_create(self, serializer):
|
||||
|
@ -138,9 +182,9 @@ class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
|
|||
def check_permissions(self, request):
|
||||
action_perm_require = {
|
||||
"refresh": "assets.refresh_assethardwareinfo",
|
||||
"push_account": "accounts.add_pushaccountexecution",
|
||||
"push_account": "accounts.push_account",
|
||||
"test": "assets.test_assetconnectivity",
|
||||
"test_account": "assets.test_account",
|
||||
"test_account": "accounts.verify_account",
|
||||
}
|
||||
_action = request.data.get("action")
|
||||
perm_required = action_perm_require.get(_action)
|
||||
|
@ -160,12 +204,12 @@ class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
|
|||
if not accounts:
|
||||
accounts = asset.accounts.all()
|
||||
|
||||
asset_ids = [asset.id]
|
||||
account_ids = accounts.values_list("id", flat=True)
|
||||
account_ids = accounts.values_list('id', flat=True)
|
||||
account_ids = [str(_id) for _id in account_ids]
|
||||
if action == "push_account":
|
||||
task = push_accounts_to_assets.delay(account_ids, asset_ids)
|
||||
task = push_accounts_to_assets_task.delay(account_ids)
|
||||
elif action == "test_account":
|
||||
task = verify_accounts_connectivity.delay(account_ids, asset_ids)
|
||||
task = verify_accounts_connectivity_task.delay(account_ids)
|
||||
else:
|
||||
task = None
|
||||
return task
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from assets.models import Host, Asset
|
||||
from assets.serializers import HostSerializer
|
||||
from assets.serializers import HostSerializer, HostInfoSerializer
|
||||
from .asset import AssetViewSet
|
||||
|
||||
__all__ = ['HostViewSet']
|
||||
|
@ -12,4 +15,10 @@ class HostViewSet(AssetViewSet):
|
|||
def get_serializer_classes(self):
|
||||
serializer_classes = super().get_serializer_classes()
|
||||
serializer_classes['default'] = HostSerializer
|
||||
serializer_classes['info'] = HostInfoSerializer
|
||||
return serializer_classes
|
||||
|
||||
@action(methods=["GET"], detail=True, url_path="info")
|
||||
def info(self, *args, **kwargs):
|
||||
asset = super().get_object()
|
||||
return Response(asset.info)
|
||||
|
|
|
@@ -1,18 +1,18 @@
# -*- coding: utf-8 -*-
#
from rest_framework.generics import ListAPIView
from django.shortcuts import get_object_or_404
from django.db.models import Q
from django.shortcuts import get_object_or_404
from rest_framework.generics import ListAPIView

from assets.models import Asset
from common.utils import get_logger
from users.models import User, UserGroup
from users.serializers import UserSerializer, UserGroupSerializer
from users.filters import UserFilter
from orgs.mixins import generics
from perms.filters import AssetPermissionFilter
from perms.models import AssetPermission
from perms.serializers import AssetPermissionSerializer
from perms.filters import AssetPermissionFilter
from orgs.mixins import generics
from assets.models import Asset
from users.filters import UserFilter
from users.models import User, UserGroup
from users.serializers import UserSerializer, UserGroupSerializer

logger = get_logger(__file__)
__all__ = [

@@ -56,6 +56,7 @@ class AssetPermUserListApi(BaseAssetPermUserOrUserGroupListApi):

class AssetPermUserGroupListApi(BaseAssetPermUserOrUserGroupListApi):
    serializer_class = UserGroupSerializer
    queryset = UserGroup.objects.none()

    def get_queryset(self):
        perms = self.get_asset_related_perms()

@@ -124,4 +125,3 @@ class AssetPermUserGroupPermissionsListApi(BaseAssetRelatedPermissionListApi):
        user_group_id = self.kwargs.get('perm_user_group_id')
        user_group = get_object_or_404(UserGroup, pk=user_group_id)
        return user_group

@@ -4,9 +4,10 @@ from django.views.generic.detail import SingleObjectMixin
from rest_framework.serializers import ValidationError
from rest_framework.views import APIView, Response

from assets.tasks import test_gateways_connectivity_manual
from common.utils import get_logger
from assets.tasks import test_assets_connectivity_manual
from orgs.mixins.api import OrgBulkModelViewSet
from .asset import HostViewSet
from .. import serializers
from ..models import Domain, Gateway

@@ -18,21 +19,23 @@ class DomainViewSet(OrgBulkModelViewSet):
    model = Domain
    filterset_fields = ("name",)
    search_fields = filterset_fields
    serializer_class = serializers.DomainSerializer
    ordering_fields = ('name',)
    ordering = ('name',)

    def get_serializer_class(self):
        if self.request.query_params.get('gateway'):
            return serializers.DomainWithGatewaySerializer
        return super().get_serializer_class()
        return serializers.DomainSerializer


class GatewayViewSet(OrgBulkModelViewSet):
class GatewayViewSet(HostViewSet):
    perm_model = Gateway
    filterset_fields = ("domain__name", "name", "domain")
    search_fields = ("domain__name",)
    serializer_class = serializers.GatewaySerializer

    def get_serializer_classes(self):
        serializer_classes = super().get_serializer_classes()
        serializer_classes['default'] = serializers.GatewaySerializer
        return serializer_classes

    def get_queryset(self):
        queryset = Domain.get_gateway_queryset()

@@ -55,5 +58,5 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
            local_port = int(local_port)
        except ValueError:
            raise ValidationError({'port': _('Number required')})
        task = test_assets_connectivity_manual.delay([gateway.id], local_port)
        task = test_gateways_connectivity_manual([gateway.id], local_port)
        return Response({'task': task.id})

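GatewayTestConnectionApi validates the optional local port before kicking off the connectivity check. A standalone sketch of that validation step; the view name and the field default are illustrative:

from rest_framework.response import Response
from rest_framework.serializers import ValidationError
from rest_framework.views import APIView


class PortCheckView(APIView):
    def post(self, request, *args, **kwargs):
        local_port = request.data.get('port', 22)
        try:
            local_port = int(local_port)
        except ValueError:
            # Same shape as the error raised above: a field-keyed message.
            raise ValidationError({'port': 'Number required'})
        return Response({'port': local_port})
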
@@ -1,24 +1,25 @@
# ~*~ coding: utf-8 ~*~
from functools import partial
from collections import namedtuple, defaultdict
from functools import partial

from django.db.models.signals import m2m_changed
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.response import Response
from rest_framework.decorators import action
from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from rest_framework.serializers import ValidationError

from assets.models import Asset
from common.const.http import POST
from common.utils import get_logger
from common.api import SuggestionMixin
from common.exceptions import SomeoneIsDoingThis
from common.const.http import POST
from common.const.signals import PRE_REMOVE, POST_REMOVE
from common.exceptions import SomeoneIsDoingThis
from common.utils import get_logger
from orgs.mixins import generics
from orgs.utils import current_org
from orgs.mixins.api import OrgBulkModelViewSet
from orgs.utils import current_org
from rbac.permissions import RBACPermission
from .. import serializers
from ..models import Node
from ..tasks import (

@@ -100,6 +101,10 @@ class NodeAddAssetsApi(generics.UpdateAPIView):
    model = Node
    serializer_class = serializers.NodeAssetsSerializer
    instance = None
    permission_classes = (RBACPermission,)
    rbac_perms = {
        'PUT': 'assets.change_assetnodes',
    }

    def perform_update(self, serializer):
        assets = serializer.validated_data.get('assets')

@@ -111,6 +116,10 @@ class NodeRemoveAssetsApi(generics.UpdateAPIView):
    model = Node
    serializer_class = serializers.NodeAssetsSerializer
    instance = None
    permission_classes = (RBACPermission,)
    rbac_perms = {
        'PUT': 'assets.change_assetnodes',
    }

    def perform_update(self, serializer):
        assets = serializer.validated_data.get('assets')

@@ -129,6 +138,10 @@ class MoveAssetsToNodeApi(generics.UpdateAPIView):
    model = Node
    serializer_class = serializers.NodeAssetsSerializer
    instance = None
    permission_classes = (RBACPermission,)
    rbac_perms = {
        'PUT': 'assets.change_assetnodes',
    }

    def perform_update(self, serializer):
        assets = serializer.validated_data.get('assets')

@@ -210,7 +223,7 @@ class NodeTaskCreateApi(generics.CreateAPIView):
            return

        if action == "refresh":
            task = update_node_assets_hardware_info_manual.delay(node.id)
            task = update_node_assets_hardware_info_manual(node)
        else:
            task = test_node_assets_connectivity_manual.delay(node.id)
            task = test_node_assets_connectivity_manual(node)
        self.set_serializer_data(serializer, task)

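In the NodeTaskCreateApi hunk the view no longer calls .delay(...) itself; the *_manual helpers are now invoked directly with the node object, which suggests they enqueue the background job internally and return something carrying a task id. A hedged sketch of such a wrapper; the real helpers live in assets.tasks and may differ:

from celery import shared_task


@shared_task
def test_node_assets_connectivity_task(node_id):
    # Hypothetical task body: resolve the node and test each asset under it.
    print('testing assets of node', node_id)


def test_node_assets_connectivity_manual(node):
    # View-facing helper: accepts the model instance, hides the Celery details
    # from the view, and returns the AsyncResult so callers can read task.id.
    return test_node_assets_connectivity_task.delay(str(node.id))
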
@@ -1,9 +1,8 @@
from jumpserver.utils import has_valid_xpack_license
from assets.const import AllTypes
from assets.models import Platform
from assets.serializers import PlatformSerializer
from common.api import JMSModelViewSet
from common.serializers import GroupedChoiceSerializer
from assets.models import Platform
from assets.const import AllTypes
from assets.serializers import PlatformSerializer

__all__ = ['AssetPlatformViewSet']

@@ -119,7 +119,7 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
        query_all = self.request.query_params.get("all", "0") == "all"
        include_assets = self.request.query_params.get('assets', '0') == '1'
        if not self.instance or not include_assets:
            return []
            return Asset.objects.none()
        if query_all:
            assets = self.instance.get_all_assets_for_tree()
        else:

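Returning Asset.objects.none() instead of [] keeps the method's return type a QuerySet in every branch, so callers can keep filtering or slicing the result without type checks. A small illustration of the difference:

from assets.models import Asset


def get_tree_assets(node, include_assets):
    # Both branches return a QuerySet, so .filter() below never blows up.
    if not node or not include_assets:
        return Asset.objects.none()
    return node.get_all_assets_for_tree()


assets = get_tree_assets(node=None, include_assets=False)
print(assets.filter(name__icontains='web').count())  # 0, no special-casing needed
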
@@ -14,3 +14,5 @@ class AssetsConfig(AppConfig):
    def ready(self):
        super().ready()
        from . import signal_handlers
        from . import tasks

@@ -1,3 +1,4 @@
import json
import os
import shutil
from collections import defaultdict

@@ -8,6 +9,7 @@ import yaml
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext as _
from sshtunnel import SSHTunnelForwarder

from assets.automations.methods import platform_automation_methods
from common.utils import get_logger, lazyproperty

@@ -25,6 +27,7 @@ class PlaybookCallback(DefaultCallback):
class BasePlaybookManager:
    bulk_size = 100
    ansible_account_policy = 'privileged_first'
    ansible_account_prefer = 'root,Administrator'

    def __init__(self, execution):
        self.execution = execution

@@ -38,6 +41,7 @@ class BasePlaybookManager:
        # 避免一个 playbook 中包含太多的主机 (avoid packing too many hosts into one playbook)
        self.method_hosts_mapper = defaultdict(list)
        self.playbooks = []
        self.gateway_servers = dict()

    @property
    def platform_automation_methods(self):

@@ -50,8 +54,7 @@ class BasePlaybookManager:
    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()

    @lazyproperty
    def runtime_dir(self):
    def prepare_runtime_dir(self):
        ansible_dir = settings.ANSIBLE_DIR
        task_name = self.execution.snapshot['name']
        dir_name = '{}_{}'.format(task_name.replace(' ', '_'), self.execution.id)

@@ -63,6 +66,14 @@ class BasePlaybookManager:
        os.makedirs(path, exist_ok=True, mode=0o755)
        return path

    @lazyproperty
    def runtime_dir(self):
        path = self.prepare_runtime_dir()
        if settings.DEBUG_DEV:
            msg = 'Ansible runtime dir: {}'.format(path)
            print(msg)
        return path

    @staticmethod
    def write_cert_to_file(filename, content):
        with open(filename, 'w') as f:

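runtime_dir is now split into prepare_runtime_dir (does the work) and a lazyproperty wrapper (computes once, caches on the instance). common.utils.lazyproperty is assumed here to behave like the standard library's functools.cached_property, which gives the same effect:

import os
import tempfile
from functools import cached_property


class RuntimeDirHolder:
    @cached_property
    def runtime_dir(self):
        # Evaluated on first access only; later accesses reuse the cached path,
        # so the directory is created exactly once per manager instance.
        path = os.path.join(tempfile.gettempdir(), 'ansible_runtime_demo')
        os.makedirs(path, exist_ok=True, mode=0o755)
        print('prepared', path)
        return path


holder = RuntimeDirHolder()
holder.runtime_dir  # prints 'prepared ...'
holder.runtime_dir  # cached, no second print
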
@@ -73,7 +84,7 @@ class BasePlaybookManager:
        if not path_dir:
            return host

        specific = host.get('jms_asset', {}).get('specific', {})
        specific = host.get('jms_asset', {}).get('secret_info', {})
        cert_fields = ('ca_cert', 'client_key', 'client_cert')
        filtered = list(filter(lambda x: specific.get(x), cert_fields))
        if not filtered:

@@ -87,7 +98,7 @@ class BasePlaybookManager:
            result = self.write_cert_to_file(
                os.path.join(cert_dir, f), specific.get(f)
            )
            host['jms_asset']['specific'][f] = result
            host['jms_asset']['secret_info'][f] = result
        return host

    def host_callback(self, host, automation=None, **kwargs):

@@ -123,6 +134,7 @@ class BasePlaybookManager:
    def generate_inventory(self, platformed_assets, inventory_path):
        inventory = JMSInventory(
            assets=platformed_assets,
            account_prefer=self.ansible_account_prefer,
            account_policy=self.ansible_account_policy,
            host_callback=self.host_callback,
        )

@@ -148,8 +160,12 @@ class BasePlaybookManager:
        return sub_playbook_path

    def get_runners(self):
        assets_group_by_platform = self.get_assets_group_by_platform()
        if settings.DEBUG_DEV:
            msg = 'Assets group by platform: {}'.format(dict(assets_group_by_platform))
            print(msg)
        runners = []
        for platform, assets in self.get_assets_group_by_platform().items():
        for platform, assets in assets_group_by_platform.items():
            assets_bulked = [assets[i:i + self.bulk_size] for i in range(0, len(assets), self.bulk_size)]

            for i, _assets in enumerate(assets_bulked, start=1):

@@ -165,6 +181,12 @@ class BasePlaybookManager:
                    self.runtime_dir,
                    callback=PlaybookCallback(),
                )

                with open(inventory_path, 'r') as f:
                    inventory_data = json.load(f)
                if not inventory_data['all'].get('hosts'):
                    continue

                runners.append(runer)
        return runners

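The assets_bulked slice comprehension splits each platform's asset list into groups of at most bulk_size so no single playbook run handles too many hosts. The same chunking in isolation:

def chunk(items, size):
    # Slice the list into consecutive groups of at most `size` elements,
    # exactly like assets_bulked above.
    return [items[i:i + size] for i in range(0, len(items), size)]


print(chunk(list(range(7)), 3))  # [[0, 1, 2], [3, 4, 5], [6]]
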
@@ -172,7 +194,7 @@ class BasePlaybookManager:
        pass

    def on_host_error(self, host, error, result):
        pass
        print('host error: {} -> {}'.format(host, error))

    def on_runner_success(self, runner, cb):
        summary = cb.summary

@@ -182,8 +204,7 @@ class BasePlaybookManager:
            if state == 'ok':
                self.on_host_success(host, result)
            elif state == 'skipped':
                # TODO
                print('skipped: ', hosts)
                pass
            else:
                error = hosts.get(host)
                self.on_host_error(host, error, result)

@@ -191,15 +212,68 @@ class BasePlaybookManager:
    def on_runner_failed(self, runner, e):
        print("Runner failed: {} {}".format(e, self))

    @staticmethod
    def file_to_json(path):
        with open(path, 'r') as f:
            d = json.load(f)
        return d

    @staticmethod
    def json_dumps(data):
        return json.dumps(data, indent=4, sort_keys=True)

    @staticmethod
    def json_to_file(path, data):
        with open(path, 'w') as f:
            json.dump(data, f, indent=4, sort_keys=True)

    def local_gateway_prepare(self, runner):
        info = self.file_to_json(runner.inventory)
        servers = []
        for k, host in info['all']['hosts'].items():
            jms_asset, jms_gateway = host['jms_asset'], host.get('gateway')
            if not jms_gateway:
                continue
            server = SSHTunnelForwarder(
                (jms_gateway['address'], jms_gateway['port']),
                ssh_username=jms_gateway['username'],
                ssh_password=jms_gateway['secret'],
                remote_bind_address=(jms_asset['address'], jms_asset['port'])
            )
            server.start()
            jms_asset['address'] = '127.0.0.1'
            jms_asset['port'] = server.local_bind_port
            servers.append(server)
        self.json_to_file(runner.inventory, info)
        self.gateway_servers[runner.id] = servers

    def local_gateway_clean(self, runner):
        servers = self.gateway_servers.get(runner.id, [])
        for s in servers:
            try:
                s.stop()
            except Exception:
                pass

    def before_runner_start(self, runner):
        pass
        self.local_gateway_prepare(runner)

    def after_runner_end(self, runner):
        self.local_gateway_clean(runner)

    def delete_runtime_dir(self):
        if settings.DEBUG_DEV:
            return
        shutil.rmtree(self.runtime_dir)

    def run(self, *args, **kwargs):
        runners = self.get_runners()
        if len(runners) > 1:
            print("### 分批次执行开始任务, 总共 {}\n".format(len(runners)))
        else:
            print("### 分次执行任务, 总共 {}\n".format(len(runners)))
        elif len(runners) == 1:
            print(">>> 开始执行任务\n")
        else:
            print("### 没有需要执行的任务\n")

        self.execution.date_start = timezone.now()
        for i, runner in enumerate(runners, start=1):

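local_gateway_prepare rewrites each host entry to point at a local SSH tunnel whenever a gateway is configured, so Ansible connects to 127.0.0.1 while the tunnel forwards traffic to the real asset. A minimal standalone sshtunnel sketch of the same idea; hostnames and credentials are placeholders:

from sshtunnel import SSHTunnelForwarder

# Forward a local port through the bastion to the target host's SSH port.
server = SSHTunnelForwarder(
    ('bastion.example.com', 22),              # gateway address and port
    ssh_username='jms_gateway_user',          # placeholder credentials
    ssh_password='secret',
    remote_bind_address=('10.0.0.5', 22),     # the asset behind the gateway
)
server.start()
print('connect to 127.0.0.1:{} instead of 10.0.0.5:22'.format(server.local_bind_port))
# ... run the playbook against the rewritten address ...
server.stop()
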
@@ -211,7 +285,10 @@ class BasePlaybookManager:
                self.on_runner_success(runner, cb)
            except Exception as e:
                self.on_runner_failed(runner, e)
                print('\n')
            finally:
                self.after_runner_end(runner)
                print('\n')
        self.execution.status = 'success'
        self.execution.date_finished = timezone.now()
        self.execution.save()
        self.delete_runtime_dir()

@@ -10,12 +10,12 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_database: "{{ jms_asset.specific.db_name }}"
    ssl: "{{ jms_asset.specific.use_ssl }}"
    ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
    ssl_certfile: "{{ jms_asset.specific.client_key }}"
    login_database: "{{ jms_asset.spec_info.db_name }}"
    ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
    connection_options:
      - tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
  register: db_info

- name: Define info by set_fact

@@ -10,7 +10,7 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_database: "{{ jms_asset.specific.db_name }}"
    login_database: "{{ jms_asset.spec_info.db_name }}"
    mode: "{{ jms_account.mode }}"
  register: db_info

@@ -10,7 +10,7 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_db: "{{ jms_asset.specific.db_name }}"
    login_db: "{{ jms_asset.spec_info.db_name }}"
  register: db_info

- name: Define info by set_fact

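Across these playbooks the asset variables move from a single "specific" namespace to "spec_info" (plain attributes such as db_name or use_ssl) and "secret_info" (sensitive material such as CA and client certificates, which the manager writes to temp files). A hedged Python sketch of the host dict the inventory is assumed to hand each playbook, limited to the keys visible in these hunks:

# Illustrative host entry; real inventories contain more fields.
host = {
    'jms_account': {'username': 'root', 'secret': '***', 'mode': 'push'},
    'jms_asset': {
        'address': '10.0.0.5',
        'port': 27017,
        'spec_info': {             # non-sensitive, type-specific attributes
            'db_name': 'admin',
            'use_ssl': True,
            'allow_invalid_cert': False,
        },
        'secret_info': {           # sensitive material written out as files
            'ca_cert': '/run/certs/ca_cert',
            'client_key': '/run/certs/client_key',
        },
    },
}
print(host['jms_asset']['spec_info']['db_name'])
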
@@ -4,16 +4,19 @@
- name: Get info
  ansible.builtin.set_fact:
    info:
      arch: "{{ ansible_architecture }}"
      distribution: "{{ ansible_distribution }}"
      distribution_version: "{{ ansible_distribution_version }}"
      kernel: "{{ ansible_kernel }}"
      vendor: "{{ ansible_system_vendor }}"
      model: "{{ ansible_product_name }}"
      sn: "{{ ansible_product_serial }}"
      cpu_model: "{{ ansible_processor }}"
      cpu_count: "{{ ansible_processor_count }}"
      cpu_cores: "{{ ansible_processor_cores }}"
      cpu_vcpus: "{{ ansible_processor_vcpus }}"
      memory: "{{ ansible_memtotal_mb }}"
      disk_total: "{{ (ansible_mounts | map(attribute='size_total') | sum / 1024 / 1024 / 1024) | round(2) }}"
      distribution: "{{ ansible_distribution }}"
      distribution_version: "{{ ansible_distribution_version }}"
      arch: "{{ ansible_architecture }}"
      kernel: "{{ ansible_kernel }}"

- debug:
    var: info

@@ -23,6 +23,8 @@ class GatherFactsManager(BasePlaybookManager):
        info = result.get('debug', {}).get('res', {}).get('info', {})
        asset = self.host_asset_mapper.get(host)
        if asset and info:
            for k, v in info.items():
                info[k] = v.strip() if isinstance(v, str) else v
            asset.info = info
            asset.save()
        else:

@@ -10,9 +10,9 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_database: "{{ jms_asset.specific.db_name }}"
    ssl: "{{ jms_asset.specific.use_ssl }}"
    ssl_ca_certs: "{{ jms_asset.specific.ca_cert }}"
    ssl_certfile: "{{ jms_asset.specific.client_key }}"
    login_database: "{{ jms_asset.spec_info.db_name }}"
    ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
    connection_options:
      - tlsAllowInvalidHostnames: "{{ jms_asset.specific.allow_invalid_cert}}"
      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"

@@ -10,5 +10,5 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_database: "{{ jms_asset.specific.db_name }}"
    login_database: "{{ jms_asset.spec_info.db_name }}"
    mode: "{{ jms_account.mode }}"

@@ -10,4 +10,6 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_db: "{{ jms_asset.specific.db_name }}"
    login_db: "{{ jms_asset.spec_info.db_name }}"
  register: result
  failed_when: not result.is_available

@@ -10,6 +10,6 @@
    login_password: "{{ jms_account.secret }}"
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    name: '{{ jms_asset.specific.db_name }}'
    name: '{{ jms_asset.spec_info.db_name }}'
    script: |
      SELECT @@version

@@ -1,5 +1,5 @@
from common.utils import get_logger
from assets.const import AutomationTypes, Connectivity
from common.utils import get_logger
from ..base.manager import BasePlaybookManager

logger = get_logger(__name__)

@@ -28,7 +28,7 @@ class PingManager(BasePlaybookManager):

    def on_host_error(self, host, error, result):
        asset, account = self.host_asset_and_account_mapper.get(host)
        asset.set_connectivity(Connectivity.FAILED)
        asset.set_connectivity(Connectivity.ERR)
        if not account:
            return
        account.set_connectivity(Connectivity.FAILED)
        account.set_connectivity(Connectivity.ERR)

@@ -1,12 +1,12 @@
import socket
import paramiko

import paramiko
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _

from common.utils import get_logger
from assets.models import Gateway
from assets.const import AutomationTypes, Connectivity
from assets.models import Gateway
from common.utils import get_logger

logger = get_logger(__name__)

@@ -33,7 +33,7 @@ class PingGatewayManager:
            err = _('No account')
            return False, err

        logger.debug('Test account: {}'.format(account))
        print('- ' + _('Asset, {}, using account {}').format(gateway, account))
        try:
            proxy.connect(
                gateway.address,

@@ -91,7 +91,7 @@ class PingGatewayManager:

    @staticmethod
    def on_host_success(gateway, account):
        logger.info('\033[32m {} -> {}\033[0m\n'.format(gateway, account))
        print('\033[32m {} -> {}\033[0m\n'.format(gateway, account))
        gateway.set_connectivity(Connectivity.OK)
        if not account:
            return

@@ -99,15 +99,15 @@ class PingGatewayManager:

    @staticmethod
    def on_host_error(gateway, account, error):
        logger.info('\033[31m {} -> {} 原因: {} \033[0m\n'.format(gateway, account, error))
        gateway.set_connectivity(Connectivity.FAILED)
        print('\033[31m {} -> {} 原因: {} \033[0m\n'.format(gateway, account, error))
        gateway.set_connectivity(Connectivity.ERR)
        if not account:
            return
        account.set_connectivity(Connectivity.FAILED)
        account.set_connectivity(Connectivity.ERR)

    @staticmethod
    def before_runner_start():
        logger.info(">>> 开始执行测试网关可连接性任务")
        print(">>> 开始执行测试网关可连接性任务")

    def get_accounts(self, gateway):
        account = gateway.select_account

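PingGatewayManager opens a plain paramiko SSH session to each gateway with the selected account and records the outcome via set_connectivity. A minimal, standalone connectivity probe in the same spirit; host, port and credentials are placeholders:

import socket

import paramiko


def probe_ssh(address, port, username, password, timeout=10):
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(address, port=port, username=username,
                       password=password, timeout=timeout)
        return True, ''
    except (paramiko.AuthenticationException, paramiko.SSHException, socket.error) as e:
        return False, str(e)
    finally:
        client.close()


ok, err = probe_ssh('gateway.example.com', 22, 'jms', 'secret')
print('OK' if ok else 'ERR: {}'.format(err))
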
@@ -3,9 +3,9 @@ from django.utils.translation import ugettext_lazy as _


class Connectivity(TextChoices):
    UNKNOWN = 'unknown', _('Unknown')
    UNKNOWN = '-', _('Unknown')
    OK = 'ok', _('Ok')
    FAILED = 'failed', _('Failed')
    ERR = 'err', _('Error')


class AutomationTypes(TextChoices):

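The connectivity enum keeps the stored value short ('-', 'ok', 'err') while the label stays translatable. A hedged sketch of how a model field and the set_connectivity helper seen in the managers above might use it; the field and method definitions are assumptions, not the project's exact model code:

from django.db import models
from django.utils.translation import gettext_lazy as _


class Connectivity(models.TextChoices):
    UNKNOWN = '-', _('Unknown')
    OK = 'ok', _('Ok')
    ERR = 'err', _('Error')


class ConnectivityMixin(models.Model):
    connectivity = models.CharField(
        max_length=16, choices=Connectivity.choices,
        default=Connectivity.UNKNOWN, verbose_name=_('Connectivity'),
    )

    class Meta:
        abstract = True

    def set_connectivity(self, value):
        # Persist only the connectivity column instead of the whole row.
        self.connectivity = value
        self.save(update_fields=['connectivity'])
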
@@ -30,7 +30,7 @@ class DatabaseTypes(BaseType):
                'ansible_connection': 'local',
            },
            'ping_enabled': True,
            'gather_facts_enabled': True,
            'gather_facts_enabled': False,
            'gather_accounts_enabled': True,
            'verify_account_enabled': True,
            'change_secret_enabled': True,

@@ -38,9 +38,21 @@ class DatabaseTypes(BaseType):
            },
            cls.REDIS: {
                'ansible_enabled': False,
                'ping_enabled': False,
                'gather_facts_enabled': False,
                'gather_accounts_enabled': False,
                'verify_account_enabled': False,
                'change_secret_enabled': False,
                'push_account_enabled': False,
            },
            cls.CLICKHOUSE: {
                'ansible_enabled': False,
                'ping_enabled': False,
                'gather_facts_enabled': False,
                'gather_accounts_enabled': False,
                'verify_account_enabled': False,
                'change_secret_enabled': False,
                'push_account_enabled': False,
            },
        }
        return constrains

@@ -63,7 +75,20 @@ class DatabaseTypes(BaseType):
            cls.SQLSERVER: [{'name': 'SQLServer'}],
            cls.CLICKHOUSE: [{'name': 'ClickHouse'}],
            cls.MONGODB: [{'name': 'MongoDB'}],
            cls.REDIS: [{'name': 'Redis'}],
            cls.REDIS: [
                {
                    'name': 'Redis',
                    'protocols_setting': {
                        'redis': {'auth_username': False}
                    }
                },
                {
                    'name': 'Redis6+',
                    'protocols_setting': {
                        'redis': {'auth_username': True}
                    }
                }
            ]
        }

    @classmethod

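Each type class returns a constrains dict keyed by concrete type, with '*' acting as the default for anything not listed (the DeviceTypes, HostTypes and WebTypes hunks below all use it). A small sketch of such a lookup; the merge-with-wildcard resolution shown here is an assumption about how BaseType applies these entries, not code taken from the project:

def get_constrains_for(constrains, tp):
    # Assumed resolution order: start from the wildcard defaults and let a
    # type-specific entry override individual flags.
    merged = dict(constrains.get('*', {}))
    merged.update(constrains.get(tp, {}))
    return merged


constrains = {
    '*': {'ansible_enabled': True, 'ping_enabled': True},
    'redis': {'ansible_enabled': False, 'ping_enabled': False},
}
print(get_constrains_for(constrains, 'redis'))       # all automation off
print(get_constrains_for(constrains, 'postgresql'))  # wildcard defaults
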
@@ -31,11 +31,11 @@ class DeviceTypes(BaseType):
    def _get_automation_constrains(cls) -> dict:
        return {
            '*': {
                'ansible_enabled': True,
                'ansible_enabled': False,
                'ansible_config': {
                    'ansible_connection': 'local',
                },
                'ping_enabled': True,
                'ping_enabled': False,
                'gather_facts_enabled': False,
                'gather_accounts_enabled': False,
                'verify_account_enabled': False,

@@ -62,6 +62,15 @@ class HostTypes(BaseType):
                    'ansible_connection': 'ssh',
                },
            },
            cls.OTHER_HOST: {
                'ansible_enabled': False,
                'ping_enabled': False,
                'gather_facts_enabled': False,
                'gather_accounts_enabled': False,
                'verify_account_enabled': False,
                'change_secret_enabled': False,
                'push_account_enabled': False
            },
        }

    @classmethod

@@ -71,14 +80,20 @@ class HostTypes(BaseType):
            {'name': 'Linux'},
            {
                'name': GATEWAY_NAME,
                'domain_enabled': False,
                'domain_enabled': True,
            }
        ],
        cls.UNIX: [
            {'name': 'Unix'},
            {'name': 'macOS'},
            {'name': 'BSD'},
            {'name': 'AIX'},
            {
                'name': 'AIX',
                'automation': {
                    'push_account_method': 'push_account_aix',
                    'change_secret_method': 'change_secret_aix',
                }
            },
        ],
        cls.WINDOWS: [
            {'name': 'Windows'},

@@ -103,8 +118,7 @@ class HostTypes(BaseType):
                    }
                }
            }
        ],
        cls.OTHER_HOST: []
        ]
    }

    @classmethod

@@ -96,6 +96,9 @@ class Protocol(ChoicesMixin, models.TextChoices):
                'port': 6379,
                'required': True,
                'secret_types': ['password'],
                'setting': {
                    'auth_username': True,
                }
            },
        }

@@ -197,7 +197,7 @@ class AllTypes(ChoicesMixin):
            category_type_mapper[p.category] += platform_count[p.id]
            tp_platforms[p.category + '_' + p.type].append(p)

        root = dict(id='ROOT', name=_('All types'), title='所有类型', open=True, isParent=True)
        root = dict(id='ROOT', name=_('All types'), title=_('All types'), open=True, isParent=True)
        nodes = [root]
        for category, type_cls in cls.category_types():
            # Category 格式化 (format the category node)

@@ -253,17 +253,20 @@ class AllTypes(ChoicesMixin):
        return data

    @classmethod
    def create_or_update_by_platform_data(cls, name, platform_data):
        from assets.models import Platform, PlatformAutomation, PlatformProtocol
    def create_or_update_by_platform_data(cls, name, platform_data, platform_cls=None):
        # 不直接用 Platform 是因为可能在 migrations 中使用 (don't use Platform directly: this may run inside migrations)
        from assets.models import Platform
        if platform_cls is None:
            platform_cls = Platform

        automation_data = platform_data.pop('automation', {})
        protocols_data = platform_data.pop('protocols', [])

        platform, created = Platform.objects.update_or_create(
        platform, created = platform_cls.objects.update_or_create(
            defaults=platform_data, name=name
        )
        if not platform.automation:
            automation = PlatformAutomation.objects.create()
            automation = platform_cls.automation.field.related_model.objects.create()
            platform.automation = automation
            platform.save()
        else:

@@ -275,10 +278,13 @@ class AllTypes(ChoicesMixin):
        platform.protocols.all().delete()
        for p in protocols_data:
            p.pop('primary', None)
            PlatformProtocol.objects.create(**p, platform=platform)
            platform.protocols.create(**p)

    @classmethod
    def create_or_update_internal_platforms(cls):
    def create_or_update_internal_platforms(cls, platform_cls=None):
        if platform_cls is None:
            platform_cls = cls

        print("\n\tCreate internal platforms")
        for category, type_cls in cls.category_types():
            print("\t## Category: {}".format(category.label))

@@ -304,14 +310,14 @@ class AllTypes(ChoicesMixin):
                setting = _protocols_setting.get(p['name'], {})
                p['required'] = p.pop('required', False)
                p['default'] = p.pop('default', False)
                p['setting'] = {**setting, **p.get('setting', {})}
                p['setting'] = {**p.get('setting', {}), **setting}

            platform_data = {
                **default_platform_data, **d,
                'automation': {**default_automation, **_automation},
                'protocols': protocols_data
            }
            cls.create_or_update_by_platform_data(name, platform_data)
            cls.create_or_update_by_platform_data(name, platform_data, platform_cls=platform_cls)

    @classmethod
    def update_user_create_platforms(cls, platform_cls):

@@ -323,9 +329,8 @@ class AllTypes(ChoicesMixin):
            internal_platforms.append(d['name'])

        user_platforms = platform_cls.objects.exclude(name__in=internal_platforms)
        user_platforms.update(internal=False)

        for platform in user_platforms:
            print("\t- Update platform: {}".format(platform.name))
            platform_data = cls.get_type_default_platform(platform.category, platform.type)
            cls.create_or_update_by_platform_data(platform.name, platform_data)
            cls.create_or_update_by_platform_data(platform.name, platform_data, platform_cls=platform_cls)
        user_platforms.update(internal=False)

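Accepting a platform_cls argument lets data migrations pass the historical model from the migration state instead of importing assets.models.Platform directly, which is exactly what the comment above flags. A hedged sketch of such a migration; the app label and dependency are placeholders:

from django.db import migrations


def create_internal_platforms(apps, schema_editor):
    from assets.const import AllTypes

    # Use the historical model captured in the migration state, not the live
    # assets.models.Platform, so the data operation matches the schema at
    # this point in history.
    platform_cls = apps.get_model('assets', 'Platform')
    AllTypes.create_or_update_internal_platforms(platform_cls=platform_cls)


class Migration(migrations.Migration):
    dependencies = [
        ('assets', '0001_initial'),  # placeholder dependency
    ]
    operations = [
        migrations.RunPython(create_internal_platforms, migrations.RunPython.noop),
    ]
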
@@ -1,3 +1,4 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from .base import BaseType

@@ -20,6 +21,8 @@ class WebTypes(BaseType):
    def _get_automation_constrains(cls) -> dict:
        constrains = {
            '*': {
                'ansible_enabled': False,
                'ping_enabled': False,
                'gather_facts_enabled': False,
                'verify_account_enabled': False,
                'change_secret_enabled': False,

@@ -50,3 +53,9 @@ class WebTypes(BaseType):
        return [
            cls.WEBSITE,
        ]


class FillType(models.TextChoices):
    no = 'no', _('Disabled')
    basic = 'basic', _('Basic')
    script = 'script', _('Script')