mirror of https://github.com/jumpserver/jumpserver
commit 786d96ee6e — perf: 修改登录 (modify login)
@@ -28,9 +28,9 @@ class AccountViewSet(OrgBulkModelViewSet):
         'default': serializers.AccountSerializer,
     }
     rbac_perms = {
-        'verify_account': 'assets.test_account',
-        'partial_update': 'assets.change_accountsecret',
-        'su_from_accounts': 'assets.view_account',
+        'verify_account': 'accounts.test_account',
+        'partial_update': ['accounts.change_accountsecret', 'accounts.change_account'],
+        'su_from_accounts': 'accounts.view_account',
     }

     @action(methods=['get'], detail=False, url_path='su-from-accounts')
@@ -67,8 +67,8 @@ class AccountSecretsViewSet(RecordViewLogMixin, AccountViewSet):
     http_method_names = ['get', 'options']
     permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
     rbac_perms = {
-        'list': 'assets.view_accountsecret',
-        'retrieve': 'assets.view_accountsecret',
+        'list': 'accounts.view_accountsecret',
+        'retrieve': 'accounts.view_accountsecret',
     }
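
These two hunks move the account permissions from the assets app label to the
new accounts app label, and 'partial_update' now maps to a list of two perms
instead of one string. A minimal sketch of how such a per-action rbac_perms
mapping can be resolved; RequireActionPerms is illustrative, not JumpServer's
real RBACPermission, and treating a list as all-required is an assumption:

from rest_framework.permissions import BasePermission

class RequireActionPerms(BasePermission):
    def has_permission(self, request, view):
        perms = view.rbac_perms.get(view.action)
        if perms is None:
            return False
        if isinstance(perms, str):
            perms = [perms]  # a single 'app_label.codename' string
        # require every listed perm; any-of would be the other design choice
        return all(request.user.has_perm(p) for p in perms)
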
@@ -77,5 +77,6 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
             'date_verified', 'created_by', 'date_created',
         ]
         extra_kwargs = {
+            'name': {'required': True},
            'spec_info': {'label': _('Spec info')},
        }
@@ -5,7 +5,7 @@ from rest_framework.serializers import ValidationError
 from rest_framework.views import APIView, Response

 from common.utils import get_logger
-from assets.tasks import test_assets_connectivity_manual
+from assets.tasks import test_gateways_connectivity_manual
 from orgs.mixins.api import OrgBulkModelViewSet
 from .. import serializers
 from ..models import Domain, Gateway
@@ -55,5 +55,5 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
             local_port = int(local_port)
         except ValueError:
             raise ValidationError({'port': _('Number required')})
-        task = test_assets_connectivity_manual.delay([gateway.id], local_port)
+        task = test_gateways_connectivity_manual([gateway.id], local_port)
         return Response({'task': task.id})
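
Note the call-style change: the view no longer invokes .delay itself. The new
manual helper (added later in this commit) resolves the gateways and calls
test_gateways_connectivity_task.delay internally, so it already returns a
Celery AsyncResult whose .id the view can put in the response. A hedged
sketch of that pattern, with generic names standing in for the real task:

from celery import shared_task

@shared_task
def connectivity_task(ids, port):
    ...  # the actual check runs in the worker

def connectivity_manual(ids, port):
    # the sync wrapper owns the dispatch; callers receive the AsyncResult
    return connectivity_task.delay(ids, port)

# a view can then do: task = connectivity_manual([...], 2222); task.id
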
@@ -1,24 +1,25 @@
 # ~*~ coding: utf-8 ~*~
-from functools import partial
 from collections import namedtuple, defaultdict
+from functools import partial

 from django.db.models.signals import m2m_changed
 from django.utils.translation import ugettext_lazy as _
 from rest_framework import status
-from rest_framework.response import Response
 from rest_framework.decorators import action
 from rest_framework.generics import get_object_or_404
+from rest_framework.response import Response
 from rest_framework.serializers import ValidationError

 from assets.models import Asset
-from common.const.http import POST
-from common.utils import get_logger
+from rbac.permissions import RBACPermission
 from common.api import SuggestionMixin
-from common.exceptions import SomeoneIsDoingThis
+from common.const.http import POST
 from common.const.signals import PRE_REMOVE, POST_REMOVE
+from common.exceptions import SomeoneIsDoingThis
+from common.utils import get_logger
 from orgs.mixins import generics
-from orgs.utils import current_org
 from orgs.mixins.api import OrgBulkModelViewSet
+from orgs.utils import current_org
 from .. import serializers
 from ..models import Node
 from ..tasks import (
@@ -27,6 +28,7 @@ from ..tasks import (
     check_node_assets_amount_task
 )

+
 logger = get_logger(__file__)
 __all__ = [
     'NodeViewSet', 'NodeAssetsApi', 'NodeAddAssetsApi',
@@ -100,6 +102,10 @@ class NodeAddAssetsApi(generics.UpdateAPIView):
     model = Node
     serializer_class = serializers.NodeAssetsSerializer
     instance = None
+    permission_classes = (RBACPermission,)
+    rbac_perms = {
+        'PUT': 'assets.add_assettonode',
+    }

     def perform_update(self, serializer):
         assets = serializer.validated_data.get('assets')
@@ -111,6 +117,10 @@ class NodeRemoveAssetsApi(generics.UpdateAPIView):
     model = Node
     serializer_class = serializers.NodeAssetsSerializer
     instance = None
+    permission_classes = (RBACPermission,)
+    rbac_perms = {
+        'PUT': 'assets.remove_assetfromnode',
+    }

     def perform_update(self, serializer):
         assets = serializer.validated_data.get('assets')
@@ -129,6 +139,10 @@ class MoveAssetsToNodeApi(generics.UpdateAPIView):
     model = Node
     serializer_class = serializers.NodeAssetsSerializer
     instance = None
+    permission_classes = (RBACPermission,)
+    rbac_perms = {
+        'PUT': 'assets.move_assettonode',
+    }

     def perform_update(self, serializer):
         assets = serializer.validated_data.get('assets')
@@ -210,7 +224,7 @@ class NodeTaskCreateApi(generics.CreateAPIView):
             return

         if action == "refresh":
-            task = update_node_assets_hardware_info_manual.delay(node.id)
+            task = update_node_assets_hardware_info_manual(node.id)
         else:
-            task = test_node_assets_connectivity_manual.delay(node.id)
+            task = test_node_assets_connectivity_manual(node.id)
         self.set_serializer_data(serializer, task)
@@ -289,4 +289,5 @@ class Asset(NodesRelationMixin, AbsConnectivity, JMSOrgBaseModel):
             ('match_asset', _('Can match asset')),
             ('add_assettonode', _('Add asset to node')),
             ('move_assettonode', _('Move asset to node')),
+            ('remove_assetfromnode', _('Remove asset from node'))
         ]
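
The new 'remove_assetfromnode' entry lives in the model's Meta.permissions,
the standard Django mechanism for custom permissions; once migrated it is
checkable as 'assets.remove_assetfromnode', which is exactly the codename the
NodeRemoveAssetsApi hunk above requires for PUT. A generic sketch (Thing is a
made-up model, not from this codebase):

from django.db import models

class Thing(models.Model):
    name = models.CharField(max_length=128)

    class Meta:
        permissions = [
            ('detach_thing', 'Can detach thing'),
        ]

# after migrate: user.has_perm('app_label.detach_thing')
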
@@ -60,7 +60,7 @@ class AssetAccountSerializer(
     template = serializers.BooleanField(
         default=False, label=_("Template"), write_only=True
     )
-    name = serializers.CharField(max_length=128, required=False, label=_("Name"))
+    name = serializers.CharField(max_length=128, required=True, label=_("Name"))

     class Meta:
         model = Account
@@ -21,13 +21,13 @@ def on_node_pre_save(sender, instance: Node, **kwargs):


 @merge_delay_run(ttl=5, key=key_by_org)
-def test_assets_connectivity_handler(*assets):
+def test_assets_connectivity_handler(assets=()):
     task_name = gettext_noop("Test assets connectivity ")
     test_assets_connectivity_task.delay(assets, task_name)


 @merge_delay_run(ttl=5, key=key_by_org)
-def gather_assets_facts_handler(*assets):
+def gather_assets_facts_handler(assets=()):
     if not assets:
         logger.info("No assets to update hardware info")
         return
@@ -36,7 +36,7 @@ def gather_assets_facts_handler(*assets):


 @merge_delay_run(ttl=5, key=key_by_org)
-def ensure_asset_has_node(*assets):
+def ensure_asset_has_node(assets=()):
     asset_ids = [asset.id for asset in assets]
     has_ids = Asset.nodes.through.objects \
         .filter(asset_id__in=asset_ids) \
@@ -60,16 +60,16 @@ def on_asset_create(sender, instance=None, created=False, **kwargs):
         return
     logger.info("Asset create signal recv: {}".format(instance))

-    ensure_asset_has_node(instance)
+    ensure_asset_has_node(assets=(instance,))

     # 获取资产硬件信息
     auto_info = instance.auto_info
     if auto_info.get('ping_enabled'):
         logger.debug('Asset {} ping enabled, test connectivity'.format(instance.name))
-        test_assets_connectivity_handler(instance)
+        test_assets_connectivity_handler(assets=(instance,))
     if auto_info.get('gather_facts_enabled'):
         logger.debug('Asset {} gather facts enabled, gather facts'.format(instance.name))
-        gather_assets_facts_handler(instance)
+        gather_assets_facts_handler(assets=(instance,))


 RELATED_NODE_IDS = '_related_node_ids'
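
The handlers decorated with @merge_delay_run now take a keyword argument with
a tuple default instead of *args, so every call site passes assets=(instance,).
That shape is what lets the decorator merge arguments from calls landing in
the same ttl window: it can accumulate values per keyword. A toy illustration
of why the keyword-tuple convention merges cleanly (plain Python, no decorator):

pending = {}

def enqueue(**kwargs):
    # each value must be a tuple/list/set so repeated calls can be combined
    for k, v in kwargs.items():
        pending[k] = list(pending.get(k, [])) + list(v)

enqueue(assets=('asset-1',))
enqueue(assets=('asset-2', 'asset-3'))
print(pending)  # {'assets': ['asset-1', 'asset-2', 'asset-3']}
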
@@ -24,7 +24,6 @@ def on_node_asset_change(sender, action, instance, reverse, pk_set, **kwargs):
     if action in refused:
         raise ValueError

-    logger.debug('Recv asset nodes change signal, recompute node assets amount')
     mapper = {PRE_ADD: add, POST_REMOVE: sub}
     if action not in mapper:
         return
@@ -34,12 +33,13 @@ def on_node_asset_change(sender, action, instance, reverse, pk_set, **kwargs):
         node_ids = [instance.id]
     else:
         node_ids = pk_set
-    update_nodes_assets_amount(*node_ids)
+    update_nodes_assets_amount(node_ids=node_ids)


 @merge_delay_run(ttl=5)
-def update_nodes_assets_amount(*node_ids):
+def update_nodes_assets_amount(node_ids=()):
     nodes = list(Node.objects.filter(id__in=node_ids))
+    logger.debug('Recv asset nodes change signal, recompute node assets amount')
     logger.info('Update nodes assets amount: {} nodes'.format(len(node_ids)))

     if len(node_ids) > 100:
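
With node_ids passed as a keyword, a burst of m2m signals collapses into one
recomputation: each call inside the 5-second ttl contributes its ids and the
body eventually runs once with the concatenation. A sketch of the observable
behavior (node_a/node_b/node_c are placeholders):

update_nodes_assets_amount(node_ids=[node_a.id, node_b.id])
update_nodes_assets_amount(node_ids=[node_b.id, node_c.id])  # within ttl=5s
# one deferred run: Node.objects.filter(id__in=[a, b, b, c]); the duplicate
# id is harmless because the id__in lookup deduplicates in SQL
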
@@ -22,7 +22,8 @@ node_assets_mapping_pub_sub = lazy(lambda: RedisPubSub('fm.node_asset_mapping'),


 @merge_delay_run(ttl=5)
-def expire_node_assets_mapping(*org_ids):
+def expire_node_assets_mapping(org_ids=()):
+    logger.debug("Recv asset nodes changed signal, expire memery node asset mapping")
     # 所有进程清除(自己的 memory 数据)
     root_org_id = Organization.ROOT_ID
     Node.expire_node_all_asset_ids_cache_mapping(root_org_id)
@@ -43,18 +44,17 @@ def on_node_post_create(sender, instance, created, update_fields, **kwargs):
         need_expire = False

     if need_expire:
-        expire_node_assets_mapping(instance.org_id)
+        expire_node_assets_mapping(org_ids=(instance.org_id,))


 @receiver(post_delete, sender=Node)
 def on_node_post_delete(sender, instance, **kwargs):
-    expire_node_assets_mapping(instance.org_id)
+    expire_node_assets_mapping(org_ids=(instance.org_id,))


 @receiver(m2m_changed, sender=Asset.nodes.through)
 def on_node_asset_change(sender, instance, **kwargs):
-    logger.debug("Recv asset nodes changed signal, expire memery node asset mapping")
-    expire_node_assets_mapping(instance.org_id)
+    expire_node_assets_mapping(org_ids=(instance.org_id,))


 @receiver(django_ready)
@@ -6,3 +6,4 @@ from .common import *
 from .automation import *
 from .gather_facts import *
 from .nodes_amount import *
+from .ping_gateway import *
@@ -40,4 +40,4 @@ def test_node_assets_connectivity_manual(node_id):
     node = Node.objects.get(id=node_id)
     task_name = gettext_noop("Test if the assets under the node are connectable ")
     assets = node.get_all_assets()
-    return test_assets_connectivity_task.delay(*assets, task_name)
+    return test_assets_connectivity_task.delay(assets, task_name)
@@ -0,0 +1,34 @@
+# ~*~ coding: utf-8 ~*~
+from celery import shared_task
+from django.utils.translation import gettext_noop
+
+from assets.const import AutomationTypes
+from common.utils import get_logger
+from orgs.utils import org_aware_func
+from .common import quickstart_automation
+
+logger = get_logger(__file__)
+
+__all__ = [
+    'test_gateways_connectivity_task',
+    'test_gateways_connectivity_manual',
+]
+
+
+@shared_task
+@org_aware_func('assets')
+def test_gateways_connectivity_task(assets, local_port, task_name=None):
+    from assets.models import PingAutomation
+    if task_name is None:
+        task_name = gettext_noop("Test gateways connectivity ")
+
+    task_name = PingAutomation.generate_unique_name(task_name)
+    task_snapshot = {'assets': [str(asset.id) for asset in assets], 'local_port': local_port}
+    quickstart_automation(task_name, AutomationTypes.ping_gateway, task_snapshot)
+
+
+def test_gateways_connectivity_manual(gateway_ids, local_port):
+    from assets.models import Asset
+    gateways = Asset.objects.filter(id__in=gateway_ids)
+    task_name = gettext_noop("Test gateways connectivity ")
+    return test_gateways_connectivity_task.delay(gateways, local_port, task_name)
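
This new module wires gateway connectivity checks into the generic automation
machinery: the manual entry point resolves gateway ids to Asset rows and
defers to the Celery task, which snapshots the asset ids plus the local port
and starts a ping_gateway automation. Typical call site, as the gateway API
hunk earlier uses it (gateway_id and the port are placeholders):

task = test_gateways_connectivity_manual([gateway_id], 2222)
print(task.id)  # AsyncResult from test_gateways_connectivity_task.delay(...)
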
@@ -41,7 +41,10 @@ def default_suffix_key(*args, **kwargs):


 def key_by_org(*args, **kwargs):
-    return args[0].org_id
+    values = list(kwargs.values())
+    if not values:
+        return 'default'
+    return values[0][0].org_id


 class EventLoopThread(threading.Thread):
@@ -79,6 +82,15 @@ def cancel_or_remove_debouncer_task(cache_key):
         task.cancel()


+def run_debouncer_func(cache_key, org, ttl, func, *args, **kwargs):
+    cancel_or_remove_debouncer_task(cache_key)
+    run_func_partial = functools.partial(_run_func_with_org, cache_key, org, func)
+    loop = _loop_thread.get_loop()
+    _debouncer = Debouncer(run_func_partial, lambda: True, ttl, loop=loop, executor=executor)
+    task = asyncio.run_coroutine_threadsafe(_debouncer(*args, **kwargs), loop=loop)
+    _loop_debouncer_func_task_cache[cache_key] = task
+
+
 class Debouncer(object):
     def __init__(self, callback, check, delay, loop=None, executor=None):
         self.callback = callback
@@ -113,12 +125,36 @@ def _run_func_with_org(key, org, func, *args, **kwargs):
     _loop_debouncer_func_args_cache.pop(key, None)


-def delay_run(ttl=5, key=None, merge_args=False):
+def delay_run(ttl=5, key=None):
     """
     延迟执行函数, 在 ttl 秒内, 只执行最后一次
     :param ttl:
     :param key: 是否合并参数, 一个 callback
-    :param merge_args: 是否合并之前的参数, bool
     :return:
     """
+
+    def inner(func):
+        suffix_key_func = key if key else default_suffix_key
+
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            from orgs.utils import get_current_org
+            org = get_current_org()
+            func_name = f'{func.__module__}_{func.__name__}'
+            key_suffix = suffix_key_func(*args)
+            cache_key = f'DELAY_RUN_{func_name}_{key_suffix}'
+            run_debouncer_func(cache_key, org, ttl, func, *args, **kwargs)
+
+        return wrapper
+
+    return inner
+
+
+def merge_delay_run(ttl=5, key=None):
+    """
+    延迟执行函数, 在 ttl 秒内, 只执行最后一次, 并且合并参数
+    :param ttl:
+    :param key: 是否合并参数, 一个 callback
     :return:
     """
@@ -127,48 +163,43 @@
         if len(sigs.parameters) != 1:
             raise ValueError('func must have one arguments: %s' % func.__name__)
         param = list(sigs.parameters.values())[0]
-        if not str(param).startswith('*') or param.kind == param.VAR_KEYWORD:
-            raise ValueError('func args must be startswith *: %s and not have **kwargs ' % func.__name__)
+        if not isinstance(param.default, tuple):
+            raise ValueError('func default must be tuple: %s' % param.default)
         suffix_key_func = key if key else default_suffix_key

         @functools.wraps(func)
-        def wrapper(*args):
+        def wrapper(*args, **kwargs):
             from orgs.utils import get_current_org
             org = get_current_org()
             func_name = f'{func.__module__}_{func.__name__}'
-            key_suffix = suffix_key_func(*args)
-            cache_key = f'DELAY_RUN_{func_name}_{key_suffix}'
-            new_arg = args
-            if merge_args:
-                values = _loop_debouncer_func_args_cache.get(cache_key, [])
-                new_arg = [*values, *args]
-                _loop_debouncer_func_args_cache[cache_key] = new_arg
+            key_suffix = suffix_key_func(*args, **kwargs)
+            cache_key = f'MERGE_DELAY_RUN_{func_name}_{key_suffix}'
+            cache_kwargs = _loop_debouncer_func_args_cache.get(cache_key, {})

-            cancel_or_remove_debouncer_task(cache_key)
-
-            run_func_partial = functools.partial(_run_func_with_org, cache_key, org, func)
-            loop = _loop_thread.get_loop()
-            _debouncer = Debouncer(run_func_partial, lambda: True, ttl,
-                                   loop=loop, executor=executor)
-            task = asyncio.run_coroutine_threadsafe(_debouncer(*new_arg),
-                                                    loop=loop)
-            _loop_debouncer_func_task_cache[cache_key] = task
+            for k, v in kwargs.items():
+                if not isinstance(v, (tuple, list, set)):
+                    raise ValueError('func kwargs value must be list or tuple: %s %s' % (func.__name__, v))
+                if k not in cache_kwargs:
+                    cache_kwargs[k] = v
+                elif isinstance(v, set):
+                    cache_kwargs[k] = cache_kwargs[k].union(v)
+                else:
+                    cache_kwargs[k] = list(cache_kwargs[k]) + list(v)
+            _loop_debouncer_func_args_cache[cache_key] = cache_kwargs
+            run_debouncer_func(cache_key, org, ttl, func, *args, **cache_kwargs)

         return wrapper

     return inner


-merge_delay_run = functools.partial(delay_run, merge_args=True)
-
-
 @delay_run(ttl=5)
-def test_delay_run(*username):
+def test_delay_run(username):
     print("Hello, %s, now is %s" % (username, time.time()))


-@delay_run(ttl=5, key=lambda *users: users[0][0], merge_args=True)
-def test_merge_delay_run(*users):
+@merge_delay_run(ttl=5, key=lambda users=(): users[0][0])
+def test_merge_delay_run(users=()):
     name = ','.join(users)
     time.sleep(2)
     print("Hello, %s, now is %s" % (name, time.time()))
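
The refactor splits the old delay_run(merge_args=...) flag into two explicit
decorators: delay_run debounces and keeps only the last call's arguments,
while merge_delay_run debounces and accumulates keyword values across calls
that share a cache key (lists/tuples concatenated, sets unioned). The
decorated function must declare exactly one keyword parameter with a tuple
default, which the signature check above enforces. The updated test functions
show the contract:

test_merge_delay_run(users=['tom'])
test_merge_delay_run(users=['tim'])   # same key ('t'), inside the 5s window
# one merged run prints: Hello, tom,tim, now is ...

test_delay_run('debounced')
test_delay_run('debounced again')     # inside the 5s window
# one run, with the last arguments only: Hello, debounced again, now is ...
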
@@ -1,6 +1,6 @@
 from django.core.management.base import BaseCommand

-from assets.signal_handlers.node_assets_mapping import expire_node_assets_mapping
+from assets.signal_handlers.node_assets_mapping import expire_node_assets_mapping as _expire_node_assets_mapping
 from orgs.caches import OrgResourceStatisticsCache
 from orgs.models import Organization
@@ -10,7 +10,7 @@ def expire_node_assets_mapping():
     org_ids = [*org_ids, '00000000-0000-0000-0000-000000000000']

     for org_id in org_ids:
-        expire_node_assets_mapping(org_id)
+        _expire_node_assets_mapping(org_ids=(org_id,))


 def expire_org_resource_statistics_cache():
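
The alias import here is a real bug fix, not just style: the management
command defines its own expire_node_assets_mapping(), which shadowed the
imported handler of the same name, so the loop body was calling the command's
local function rather than the signal handler. Renaming the import removes
the collision:

from assets.signal_handlers.node_assets_mapping import \
    expire_node_assets_mapping as _expire_node_assets_mapping

def expire_node_assets_mapping():          # local name no longer shadows
    for org_id in ['org-a', 'org-b']:      # ids illustrative
        _expire_node_assets_mapping(org_ids=(org_id,))
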
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7e35d73f8576a0ea30a0da3886b24033f61f1019f6e15466d7b5904b5dd15ef9
-size 136075
+oid sha256:6a677ea531b36752e5d3cbfde37bd2667539390e12281b7a4c595fa4d65c1a8a
+size 135351
(file diff suppressed because it is too large)
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1d3093d239e72a1ab35464fcdebd157330dbde7ae1cfd0f89a7d75c52eade900
-size 111883
+oid sha256:3ea4776e6efd07b7d818b5f64d68b14d0c1cbf089766582bd4c2b43e1b183bf8
+size 111254
(file diff suppressed because it is too large)
@@ -10,7 +10,7 @@ __all__ = ['JMSInventory']

 class JMSInventory:
     def __init__(self, assets, account_policy='privileged_first',
-                 account_prefer='root,Administrator', host_callback=None):
+                 account_prefer='root,Administrator', host_callback=None, exclude_localhost=False):
         """
         :param assets:
         :param account_prefer: account username name if not set use account_policy
@@ -21,6 +21,7 @@ class JMSInventory:
         self.account_policy = account_policy
         self.host_callback = host_callback
         self.exclude_hosts = {}
+        self.exclude_localhost = exclude_localhost

     @staticmethod
     def clean_assets(assets):
@@ -117,7 +118,10 @@ class JMSInventory:
         if host['jms_account'] and asset.platform.type == 'oracle':
             host['jms_account']['mode'] = 'sysdba' if account.privileged else None

-        ansible_config = dict(automation.ansible_config)
+        try:
+            ansible_config = dict(automation.ansible_config)
+        except Exception as e:
+            ansible_config = {}
         ansible_connection = ansible_config.get('ansible_connection', 'ssh')
         host.update(ansible_config)
@@ -204,6 +208,8 @@ class JMSInventory:
         for host in hosts:
             name = host.pop('name')
             data['all']['hosts'][name] = host
+        if self.exclude_localhost and data['all']['hosts'].__contains__('localhost'):
+            data['all']['hosts'].update({'localhost': {'ansible_host': '255.255.255.255'}})
         return data

     def write_to_file(self, path):
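
exclude_localhost lets callers neutralize a generated 'localhost' entry
rather than delete it: the host stays in the inventory, but its entry is
replaced so ansible_host points at the unroutable 255.255.255.255. A hedged
usage sketch (the asset objects are elided, and the name of the method that
builds the dict is an assumption here):

inventory = JMSInventory(assets, exclude_localhost=True)
data = inventory.generate()   # assuming the dict builder is named generate
# data['all']['hosts']['localhost'] == {'ansible_host': '255.255.255.255'}
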
@@ -4,6 +4,7 @@ import zipfile

 from django.conf import settings
 from django.shortcuts import get_object_or_404
+from rest_framework import status

 from orgs.mixins.api import OrgBulkModelViewSet
 from ..exception import PlaybookNoValidEntry
@@ -129,25 +130,25 @@ class PlaybookFileBrowserAPIView(APIView):
         work_path = playbook.work_dir

         file_key = request.data.get('key', '')
+        new_name = request.data.get('new_name', '')

-        if file_key in self.protected_files:
-            return Response({'msg': '{} can not be modified'.format(file_key)}, status=400)
+        if file_key in self.protected_files and new_name:
+            return Response({'msg': '{} can not be rename'.format(file_key)}, status=status.HTTP_400_BAD_REQUEST)

         if os.path.dirname(file_key) == 'root':
             file_key = os.path.basename(file_key)

-        new_name = request.data.get('new_name', '')
         content = request.data.get('content', '')
         is_directory = request.data.get('is_directory', False)

         if not file_key or file_key == 'root':
-            return Response(status=400)
+            return Response(status=status.HTTP_400_BAD_REQUEST)
         file_path = os.path.join(work_path, file_key)

         if new_name:
             new_file_path = os.path.join(os.path.dirname(file_path), new_name)
             if os.path.exists(new_file_path):
-                return Response({'msg': '{} already exists'.format(new_name)}, status=400)
+                return Response({'msg': '{} already exists'.format(new_name)}, status=status.HTTP_400_BAD_REQUEST)
             os.rename(file_path, new_file_path)
             file_path = new_file_path
@@ -162,9 +163,9 @@ class PlaybookFileBrowserAPIView(APIView):
         work_path = playbook.work_dir
         file_key = request.query_params.get('key', '')
         if not file_key:
-            return Response({'msg': 'key is required'}, status=400)
+            return Response({'msg': 'key is required'}, status=status.HTTP_400_BAD_REQUEST)
         if file_key in self.protected_files:
-            return Response({'msg': ' {} can not be delete'.format(file_key)}, status=400)
+            return Response({'msg': ' {} can not be delete'.format(file_key)}, status=status.HTTP_400_BAD_REQUEST)
         file_path = os.path.join(work_path, file_key)
         if os.path.isdir(file_path):
             shutil.rmtree(file_path)
@@ -0,0 +1,31 @@
+# Generated by Django 3.2.16 on 2023-02-13 07:03
+
+from django.conf import settings
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('ops', '0027_auto_20230206_1927'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='playbook',
+            options={'ordering': ['date_created']},
+        ),
+        migrations.AlterUniqueTogether(
+            name='adhoc',
+            unique_together={('name', 'org_id', 'creator')},
+        ),
+        migrations.AlterUniqueTogether(
+            name='job',
+            unique_together={('name', 'org_id', 'creator')},
+        ),
+        migrations.AlterUniqueTogether(
+            name='playbook',
+            unique_together={('name', 'org_id', 'creator')},
+        ),
+    ]
@@ -17,7 +17,6 @@ logger = get_logger(__file__)


 class AdHoc(JMSOrgBaseModel):
-
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
     name = models.CharField(max_length=128, verbose_name=_('Name'))
     pattern = models.CharField(max_length=1024, verbose_name=_("Pattern"), default='all')
@@ -42,4 +41,5 @@ class AdHoc(JMSOrgBaseModel):
         return "{}: {}".format(self.module, self.args)

     class Meta:
+        unique_together = [('name', 'org_id', 'creator')]
         verbose_name = _("AdHoc")
@@ -96,6 +96,7 @@ class Job(JMSOrgBaseModel, PeriodTaskModelMixin):

     class Meta:
         verbose_name = _("Job")
+        unique_together = [('name', 'org_id', 'creator')]
         ordering = ['date_created']
@@ -294,10 +295,20 @@ class JobExecution(JMSOrgBaseModel):
             task_id = current_task.request.root_id
         self.task_id = task_id

+    def check_danger_keywords(self):
+        lines = self.job.playbook.check_dangerous_keywords()
+        if len(lines) > 0:
+            for line in lines:
+                print('\033[31mThe {} line of the file \'{}\' contains the '
+                      'dangerous keyword \'{}\'\033[0m'.format(line['line'], line['file'], line['keyword']))
+            raise Exception("Playbook contains dangerous keywords")
+
     def start(self, **kwargs):
         self.date_start = timezone.now()
         self.set_celery_id()
         self.save()
+        if self.job.type == 'playbook':
+            self.check_danger_keywords()
         runner = self.get_runner()
         try:
             cb = runner.run(**kwargs)
@@ -9,6 +9,13 @@ from ops.const import CreateMethods
 from ops.exception import PlaybookNoValidEntry
 from orgs.mixins.models import JMSOrgBaseModel

+dangerous_keywords = (
+    'delegate_to:localhost',
+    'delegate_to:127.0.0.1',
+    'local_action',
+    'connection:local',
+)
+

 class Playbook(JMSOrgBaseModel):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@@ -20,6 +27,27 @@ class Playbook(JMSOrgBaseModel):
                                      verbose_name=_('CreateMethod'))
     vcs_url = models.CharField(max_length=1024, default='', verbose_name=_('VCS URL'), null=True, blank=True)

+    def check_dangerous_keywords(self):
+        result = []
+        for root, dirs, files in os.walk(self.work_dir):
+            for f in files:
+                if str(f).endswith('.yml') or str(f).endswith('.yaml'):
+                    lines = self.search_keywords(os.path.join(root, f))
+                    if len(lines) > 0:
+                        for line in lines:
+                            result.append({'file': f, 'line': line[0], 'keyword': line[1]})
+        return result
+
+    @staticmethod
+    def search_keywords(file):
+        result = []
+        with open(file, 'r') as f:
+            for line_num, line in enumerate(f):
+                for keyword in dangerous_keywords:
+                    if keyword in line.replace(' ', ''):
+                        result.append((line_num, keyword))
+        return result
+
     @property
     def entry(self):
         work_dir = self.work_dir
@@ -33,3 +61,7 @@ class Playbook(JMSOrgBaseModel):
     def work_dir(self):
         work_dir = os.path.join(settings.DATA_DIR, "ops", "playbook", self.id.__str__())
         return work_dir
+
+    class Meta:
+        unique_together = [('name', 'org_id', 'creator')]
+        ordering = ['date_created']
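
The keyword scan is intentionally blunt: each line has its spaces stripped,
so 'delegate_to: localhost' and 'delegate_to:localhost' both match, at the
cost of false positives inside comments or strings, and the reported line
numbers are zero-based (enumerate's default). A self-contained illustration
of the matching rule:

dangerous_keywords = ('delegate_to:localhost', 'local_action')

def search_keywords_in_text(text):
    result = []
    for line_num, line in enumerate(text.splitlines()):
        for keyword in dangerous_keywords:
            if keyword in line.replace(' ', ''):
                result.append((line_num, keyword))
    return result

print(search_keywords_in_text("- name: x\n  delegate_to: localhost\n"))
# [(1, 'delegate_to:localhost')]
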
@@ -1,3 +1,5 @@
+import uuid
+
 from django.utils.translation import gettext_lazy as _
 from rest_framework import serializers
@@ -14,6 +16,14 @@ class JobSerializer(BulkOrgResourceModelSerializer, PeriodTaskSerializerMixin):
     run_after_save = serializers.BooleanField(label=_("Run after save"), default=False, required=False)
     nodes = serializers.ListField(required=False, child=serializers.CharField())
     date_last_run = serializers.DateTimeField(label=_('Date last run'), read_only=True)
+    name = serializers.CharField(label=_('Name'), max_length=128, allow_blank=True, required=False)
+
+    def to_internal_value(self, data):
+        instant = data.get('instant', False)
+        if instant:
+            _uid = str(uuid.uuid4()).split('-')[-1]
+            data['name'] = f'job-{_uid}'
+        return super().to_internal_value(data)

     def get_request_user(self):
         request = self.context.get('request')
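
With name now optional, an instant job submitted without one is auto-named in
to_internal_value, before field validation runs, using the last segment of a
uuid4. A quick demonstration of how that suffix is derived (the exact value
is random):

import uuid

_uid = str(uuid.uuid4()).split('-')[-1]
print(f'job-{_uid}')   # e.g. job-9f3c2a1b8d7e (12 hex characters)
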
@@ -76,7 +76,7 @@ model_cache_field_mapper = {
 class OrgResourceStatisticsRefreshUtil:
     @staticmethod
     @merge_delay_run(ttl=5)
-    def refresh_org_fields(*org_fields):
+    def refresh_org_fields(org_fields=()):
         for org, cache_field_name in org_fields:
             OrgResourceStatisticsCache(org).expire(*cache_field_name)
             OrgResourceStatisticsCache(Organization.root()).expire(*cache_field_name)
@@ -38,7 +38,7 @@ class PermAccountUtil(AssetPermissionUtil):
             alias_action_bit_mapper[alias] |= perm.actions
             alias_expired_mapper[alias].append(perm.date_expired)

-        asset_accounts = asset.accounts.all()
+        asset_accounts = asset.accounts.all().active()
         username_account_mapper = {account.username: account for account in asset_accounts}

         cleaned_accounts_action_bit = defaultdict(int)
@@ -89,11 +89,15 @@ class RoleViewSet(JMSModelViewSet):


 class SystemRoleViewSet(RoleViewSet):
+    perm_model = SystemRole
+
     def get_queryset(self):
         return super().get_queryset().filter(scope='system')


 class OrgRoleViewSet(RoleViewSet):
+    perm_model = OrgRole
+
     def get_queryset(self):
         return super().get_queryset().filter(scope='org')
@@ -2,13 +2,14 @@
 #

 from smtplib import SMTPSenderRefused
-from rest_framework.views import Response, APIView
+
+from django.conf import settings
 from django.core.mail import send_mail, get_connection
 from django.utils.translation import ugettext_lazy as _
+from rest_framework.views import Response, APIView

 from common.utils import get_logger
 from .. import serializers
-from django.conf import settings

 logger = get_logger(__file__)
@@ -41,7 +42,7 @@ class MailTestingAPI(APIView):
         # if k.startswith('EMAIL'):
         #     setattr(settings, k, v)
         try:
-            subject = "Test"
+            subject = settings.EMAIL_SUBJECT_PREFIX + "Test"
             message = "Test smtp setting"
             email_from = email_from or email_host_user
             email_recipient = email_recipient or email_from
@@ -103,9 +103,9 @@ class CommandViewSet(JMSBulkModelViewSet):
     command_store = get_command_storage()
     serializer_class = SessionCommandSerializer
     filterset_class = CommandFilter
-    search_fields = ('input',)
     model = Command
-    ordering_fields = ('timestamp',)
+    search_fields = ('input',)
+    ordering_fields = ('timestamp', 'risk_level')

     def merge_all_storage_list(self, request, *args, **kwargs):
         merged_commands = []
@@ -162,6 +162,12 @@ def default_chrome_driver_options():
     options.add_argument("start-maximized")
     # 禁用 扩展
    options.add_argument("--disable-extensions")
+    # 忽略证书错误相关
+    options.add_argument('--ignore-ssl-errors')
+    options.add_argument('--ignore-certificate-errors')
+    options.add_argument('--ignore-certificate-errors-spki-list')
+    options.add_argument('--allow-running-insecure-content')
+
     # 禁用开发者工具
     options.add_argument("--disable-dev-tools")
     # 禁用 密码管理器弹窗
@@ -59,12 +59,12 @@ def check_pid_alive(pid) -> bool:
         content = decode_content(csv_ret)
         content_list = content.strip().split("\r\n")
         if len(content_list) != 2:
-            notify_err_message(content)
+            print("check pid {} ret invalid: {}".format(pid, content))
             return False
         ret_pid = content_list[1].split(",")[1].strip('"')
         return str(pid) == ret_pid
     except Exception as e:
-        notify_err_message(e)
+        print("check pid {} err: {}".format(pid, e))
         return False
@@ -73,7 +73,7 @@ def wait_pid(pid):
         time.sleep(5)
         ok = check_pid_alive(pid)
         if not ok:
-            notify_err_message("程序退出")
+            print("pid {} is not alive".format(pid))
             break
@@ -59,14 +59,12 @@ def check_pid_alive(pid) -> bool:
         content = decode_content(csv_ret)
         content_list = content.strip().split("\r\n")
         if len(content_list) != 2:
-            notify_err_message(content)
-            time.sleep(2)
+            print("check pid {} ret invalid: {}".format(pid, content))
             return False
         ret_pid = content_list[1].split(",")[1].strip('"')
         return str(pid) == ret_pid
     except Exception as e:
-        notify_err_message(e)
-            time.sleep(2)
+        print("check pid {} err: {}".format(pid, e))
         return False
@@ -75,8 +73,7 @@ def wait_pid(pid):
         time.sleep(5)
         ok = check_pid_alive(pid)
         if not ok:
-            notify_err_message("程序退出")
-            time.sleep(2)
+            print("pid {} is not alive".format(pid))
             break
@@ -40,7 +40,7 @@ def main():
         default='all',
         help="resource to generate"
     )
-    parser.add_argument('-c', '--count', type=int, default=10000)
+    parser.add_argument('-c', '--count', type=int, default=1000)
     parser.add_argument('-b', '--batch_size', type=int, default=100)
     parser.add_argument('-o', '--org', type=str, default='')
     args = parser.parse_args()
@@ -44,5 +44,6 @@ class FakeDataGenerator:
             using = end - start
             from_size = created
             created += len(batch)
-            print('Generate %s: %s-%s [{}s]' % (self.resource, from_size, created, using))
+            print('Generate %s: %s-%s [%s]' % (self.resource, from_size, created, using))
             self.after_generate()
+            time.sleep(20)
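
The print fix matters because the old line mixed str.format and % styles:
'%s-%s [{}s]' % (...) leaves '{}' untouched and, with four values supplied
for three '%s' slots, raises a TypeError. Quick demonstration:

try:
    print('Generate %s: %s-%s [{}s]' % ('assets', 0, 100, 1.2))
except TypeError as e:
    print(e)   # not all arguments converted during string formatting
print('Generate %s: %s-%s [%s]' % ('assets', 0, 100, 1.2))
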