mirror of https://github.com/jumpserver/jumpserver
feat: Refactor operation logs (#8941)

* feat: Refactor the operation log module
* feat: Record operation logs for change-password plans
* feat: Support storing operation logs in Elasticsearch, and allow a custom limit size on the API
* feat: Translations
* feat: Generate migration files
* feat: Optimize migration files
* feat: Optimize many-to-many log records
* feat: Clean up the ES command-storage and ES log-storage code
* feat: Improve masking of sensitive fields

Co-authored-by: Jiangjie.Bai <bugatti_it@163.com>
parent 1e97a23bc5
commit 2029e9f8df
@@ -21,8 +21,8 @@ class Migration(migrations.Migration):
                 ('name', models.CharField(max_length=64, verbose_name='Name')),
                 ('is_active', models.BooleanField(default=True, verbose_name='Is active')),
                 ('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
-                ('date_created', models.DateTimeField(auto_now_add=True)),
-                ('date_updated', models.DateTimeField(auto_now=True)),
+                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
+                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                 ('created_by', models.CharField(blank=True, default='', max_length=128, verbose_name='Created by')),
             ],
             options={
@@ -50,8 +50,8 @@ class CommandFilter(OrgModelMixin):
     )
     is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
     comment = models.TextField(blank=True, default='', verbose_name=_("Comment"))
-    date_created = models.DateTimeField(auto_now_add=True)
-    date_updated = models.DateTimeField(auto_now=True)
+    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
+    date_updated = models.DateTimeField(auto_now=True, verbose_name=_('Date updated'))
     created_by = models.CharField(
         max_length=128, blank=True, default='', verbose_name=_('Created by')
     )
@@ -1,21 +1,29 @@
 # -*- coding: utf-8 -*-
 #
-from rest_framework.mixins import ListModelMixin, CreateModelMixin
+from importlib import import_module
+
+from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin
 from django.db.models import F, Value
 from django.db.models.functions import Concat
+from django.conf import settings
 from rest_framework.permissions import IsAuthenticated
 from rest_framework import generics

 from common.drf.api import JMSReadOnlyModelViewSet
+from common.plugins.es import QuerySet as ESQuerySet
 from common.drf.filters import DatetimeRangeFilter
 from common.api import CommonGenericViewSet
 from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet, OrgRelationMixin
 from orgs.utils import current_org
 from ops.models import CommandExecution
 from . import filters
+from .backends import TYPE_ENGINE_MAPPING
 from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog
 from .serializers import FTPLogSerializer, UserLoginLogSerializer, CommandExecutionSerializer
-from .serializers import OperateLogSerializer, PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
+from .serializers import (
+    OperateLogSerializer, OperateLogActionDetailSerializer,
+    PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
+)


 class FTPLogViewSet(CreateModelMixin,
@@ -68,7 +76,7 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView):
         return qs


-class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
+class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet):
     model = OperateLog
     serializer_class = OperateLogSerializer
     extra_filter_backends = [DatetimeRangeFilter]
@@ -79,6 +87,22 @@ class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
     search_fields = ['resource']
     ordering = ['-datetime']

+    def get_serializer_class(self):
+        if self.request.query_params.get('type') == 'action_detail':
+            return OperateLogActionDetailSerializer
+        return super().get_serializer_class()
+
+    def get_queryset(self):
+        qs = OperateLog.objects.all()
+        es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
+        if es_config:
+            engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
+            store = engine_mod.OperateLogStore(es_config)
+            if store.ping(timeout=2):
+                qs = ESQuerySet(store)
+                qs.model = OperateLog
+        return qs
+

 class PasswordChangeLogViewSet(ListModelMixin, CommonGenericViewSet):
     queryset = PasswordChangeLog.objects.all()
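For reference, a rough usage sketch of the new retrieve support plus the `type=action_detail` switch; the URL path below is an assumption for illustration, not taken from this commit:

from rest_framework.test import APIClient

client = APIClient()
log_id = '0c2ce0a4-8e7e-4ec5-a08b-b0e2b4b9f1a9'
# Full record, serialized by OperateLogSerializer (RetrieveModelMixin now allows single-object GETs)
client.get(f'/api/v1/audits/operate-logs/{log_id}/')
# Only the before/after diff, serialized by OperateLogActionDetailSerializer
client.get(f'/api/v1/audits/operate-logs/{log_id}/?type=action_detail')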
@@ -0,0 +1,18 @@
+from importlib import import_module
+
+from django.conf import settings
+
+
+TYPE_ENGINE_MAPPING = {
+    'db': 'audits.backends.db',
+    'es': 'audits.backends.es',
+}
+
+
+def get_operate_log_storage(default=False):
+    engine_mod = import_module(TYPE_ENGINE_MAPPING['db'])
+    es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
+    if not default and es_config:
+        engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
+    storage = engine_mod.OperateLogStore(es_config)
+    return storage
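A minimal sketch of how this factory is meant to be called, mirroring the handler added later in this commit; falling back to the DB store when Elasticsearch is unreachable is the caller's job:

from audits.backends import get_operate_log_storage

storage = get_operate_log_storage()              # ES store if OPERATE_LOG_ELASTICSEARCH_CONFIG is set, else DB store
if not storage.ping(timeout=1):
    storage = get_operate_log_storage(default=True)   # force the DB store as a fallback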
@@ -0,0 +1,38 @@
+# ~*~ coding: utf-8 ~*~
+from django.utils.translation import ugettext_lazy as _
+
+from audits.models import OperateLog
+
+
+class OperateLogStore(object):
+    def __init__(self, config):
+        self.model = OperateLog
+        self.max_length = 1024
+        self.max_length_tip_msg = _(
+            'The text content is too long. Use Elasticsearch to store operation logs'
+        )
+
+    @staticmethod
+    def ping(timeout=None):
+        return True
+
+    def save(self, **kwargs):
+        log_id = kwargs.get('id', '')
+        before = kwargs.get('before') or {}
+        after = kwargs.get('after') or {}
+        if len(str(before)) > self.max_length:
+            before = {_('Tips'): self.max_length_tip_msg}
+        if len(str(after)) > self.max_length:
+            after = {_('Tips'): self.max_length_tip_msg}
+
+        op_log = self.model.objects.filter(pk=log_id).first()
+        if op_log is not None:
+            raw_after = op_log.after or {}
+            raw_before = op_log.before or {}
+            raw_before.update(before)
+            raw_after.update(after)
+            op_log.before = raw_before
+            op_log.after = raw_after
+            op_log.save()
+        else:
+            self.model.objects.create(**kwargs)
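A sketch of a single write through the DB store, using the same keyword arguments the handler passes to both backends (the concrete values are invented for illustration):

import uuid

from audits.models import OperateLog
from audits.backends.db import OperateLogStore

store = OperateLogStore(config=None)    # the DB store ignores its config argument
store.save(
    id=str(uuid.uuid4()), user='admin', action=OperateLog.ACTION_UPDATE,
    resource_type='User', resource='alice', remote_addr='127.0.0.1',
    before={'Name': 'alice'}, after={'Name': 'Alice'},
    org_id='',    # the handler normally passes get_current_org_id() here
)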
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+#
+import uuid
+
+from common.utils.timezone import local_now_display
+from common.utils import get_logger
+from common.utils.encode import Singleton
+from common.plugins.es import ES
+
+
+logger = get_logger(__file__)
+
+
+class OperateLogStore(ES, metaclass=Singleton):
+    def __init__(self, config):
+        properties = {
+            "id": {
+                "type": "keyword"
+            },
+            "user": {
+                "type": "keyword"
+            },
+            "action": {
+                "type": "keyword"
+            },
+            "resource_type": {
+                "type": "keyword"
+            },
+            "org_id": {
+                "type": "keyword"
+            },
+            "datetime": {
+                "type": "date",
+                "format": "yyyy-MM-dd HH:mm:ss"
+            }
+        }
+        exact_fields = {}
+        match_fields = {
+            'id', 'user', 'action', 'resource_type',
+            'resource', 'remote_addr', 'org_id'
+        }
+        keyword_fields = {
+            'id', 'user', 'action', 'resource_type', 'org_id'
+        }
+        if not config.get('INDEX'):
+            config['INDEX'] = 'jumpserver_operate_log'
+        super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
+        self.pre_use_check()
+
+    @staticmethod
+    def make_data(data):
+        op_id = data.get('id', str(uuid.uuid4()))
+        datetime_param = data.get('datetime', local_now_display())
+        data = {
+            'id': op_id, 'user': data['user'], 'action': data['action'],
+            'resource_type': data['resource_type'], 'resource': data['resource'],
+            'remote_addr': data['remote_addr'], 'datetime': datetime_param,
+            'before': data['before'], 'after': data['after'], 'org_id': data['org_id']
+        }
+        return data
+
+    def save(self, **kwargs):
+        log_id = kwargs.get('id', '')
+        before = kwargs.get('before') or {}
+        after = kwargs.get('after') or {}
+
+        op_log = self.get({'id': log_id})
+        if op_log is not None:
+            data = {'doc': {}}
+            raw_after = op_log.get('after') or {}
+            raw_before = op_log.get('before') or {}
+            raw_before.update(before)
+            raw_after.update(after)
+            data['doc']['before'] = raw_before
+            data['doc']['after'] = raw_after
+            self.es.update(
+                index=self.index, doc_type=self.doc_type,
+                id=op_log.get('es_id'), body=data, refresh=True
+            )
+        else:
+            data = self.make_data(kwargs)
+            self.es.index(
+                index=self.index, doc_type=self.doc_type, body=data,
+                refresh=True
+            )
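A possible shape for settings.OPERATE_LOG_ELASTICSEARCH_CONFIG, inferred from the keys the ES base class (common/plugins/es.py, added at the end of this commit) reads; the exact setting contents are an assumption, not part of the commit:

OPERATE_LOG_ELASTICSEARCH_CONFIG = {
    'HOSTS': ['http://127.0.0.1:9200'],
    'INDEX': 'jumpserver_operate_log',     # defaulted by OperateLogStore when missing
    'DOC_TYPE': '_doc',
    'INDEX_BY_DATE': False,                # True creates per-day indexes plus an alias
    'OTHER': {'IGNORE_VERIFY_CERTS': True},
}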
@@ -7,11 +7,13 @@ DEFAULT_CITY = _("Unknown")
 MODELS_NEED_RECORD = (
     # users
     'User', 'UserGroup',
+    # authentication
+    'AccessKey', 'TempToken',
     # acls
     'LoginACL', 'LoginAssetACL', 'LoginConfirmSetting',
     # assets
     'Asset', 'Node', 'AdminUser', 'SystemUser', 'Domain', 'Gateway', 'CommandFilterRule',
-    'CommandFilter', 'Platform', 'AuthBook',
+    'CommandFilter', 'Platform', 'Label',
     # applications
     'Application',
     # orgs
@@ -20,6 +22,13 @@ MODELS_NEED_RECORD = (
     'Setting',
     # perms
     'AssetPermission', 'ApplicationPermission',
+    # notifications
+    'SystemMsgSubscription', 'UserMsgSubscription',
+    # Terminal
+    'Terminal', 'Endpoint', 'EndpointRule', 'CommandStorage', 'ReplayStorage',
+    # rbac
+    'Role', 'SystemRole', 'OrgRole', 'RoleBinding', 'OrgRoleBinding', 'SystemRoleBinding',
     # xpack
-    'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'GatherUserTask',
+    'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'ApplicationChangeAuthPlan',
+    'GatherUserTask', 'Interface',
 )
@@ -0,0 +1,183 @@
+from datetime import datetime
+
+from django.db import transaction
+from django.core.cache import cache
+from django.utils.translation import ugettext_lazy as _
+
+from common.utils import get_request_ip, get_logger
+from common.utils.timezone import as_current_tz
+from common.utils.encode import Singleton
+from common.local import encrypted_field_set
+from settings.serializers import SettingsSerializer
+from jumpserver.utils import current_request
+from audits.models import OperateLog
+from orgs.utils import get_current_org_id
+
+from .backends import get_operate_log_storage
+
+
+logger = get_logger(__name__)
+
+
+class ModelClient:
+    @staticmethod
+    def save(**kwargs):
+        log_id = kwargs.get('id', '')
+        op_log = OperateLog.objects.filter(pk=log_id).first()
+        if op_log is not None:
+            raw_after = op_log.after or {}
+            raw_before = op_log.before or {}
+            cur_before = kwargs.get('before') or {}
+            cur_after = kwargs.get('after') or {}
+            raw_before.update(cur_before)
+            raw_after.update(cur_after)
+            op_log.before = raw_before
+            op_log.after = raw_after
+            op_log.save()
+        else:
+            OperateLog.objects.create(**kwargs)
+
+
+class OperatorLogHandler(metaclass=Singleton):
+    CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
+
+    def __init__(self):
+        self.log_client = self.get_storage_client()
+
+    @staticmethod
+    def get_storage_client():
+        client = get_operate_log_storage()
+        return client
+
+    @staticmethod
+    def _consistent_type_to_str(value1, value2):
+        if isinstance(value1, datetime):
+            value1 = as_current_tz(value1).strftime('%Y-%m-%d %H:%M:%S')
+        if isinstance(value2, datetime):
+            value2 = as_current_tz(value2).strftime('%Y-%m-%d %H:%M:%S')
+        return value1, value2
+
+    def _look_for_two_dict_change(self, left_dict, right_dict):
+        # Use the right-hand dict as the baseline
+        before, after = {}, {}
+        for key, value in right_dict.items():
+            pre_value = left_dict.get(key, '')
+            pre_value, value = self._consistent_type_to_str(pre_value, value)
+            if sorted(str(value)) == sorted(str(pre_value)):
+                continue
+            if pre_value:
+                before[key] = pre_value
+            if value:
+                after[key] = value
+        return before, after
+
+    def cache_instance_before_data(self, instance_dict):
+        instance_id = instance_dict.get('id')
+        if instance_id is None:
+            return
+
+        key = '%s_%s' % (self.CACHE_KEY, instance_id)
+        cache.set(key, instance_dict, 3 * 60)
+
+    def get_instance_dict_from_cache(self, instance_id):
+        if instance_id is None:
+            return None
+
+        key = '%s_%s' % (self.CACHE_KEY, instance_id)
+        cache_instance = cache.get(key, {})
+        log_id = cache_instance.get('operate_log_id')
+        return log_id, cache_instance
+
+    def get_instance_current_with_cache_diff(self, current_instance):
+        log_id, before, after = None, None, None
+        instance_id = current_instance.get('id')
+        if instance_id is None:
+            return log_id, before, after
+
+        log_id, cache_instance = self.get_instance_dict_from_cache(instance_id)
+        if not cache_instance:
+            return log_id, before, after
+
+        before, after = self._look_for_two_dict_change(
+            cache_instance, current_instance
+        )
+        return log_id, before, after
+
+    @staticmethod
+    def get_resource_display_from_setting(resource):
+        resource_display = None
+        setting_serializer = SettingsSerializer()
+        label = setting_serializer.get_field_label(resource)
+        if label is not None:
+            resource_display = label
+        return resource_display
+
+    def get_resource_display(self, resource):
+        resource_display = str(resource)
+        return_value = self.get_resource_display_from_setting(resource_display)
+        if return_value is not None:
+            resource_display = return_value
+        return resource_display
+
+    def __data_processing(self, dict_item, loop=True):
+        encrypt_value = '******'
+        for key, value in dict_item.items():
+            if isinstance(value, bool):
+                value = _('Yes') if value else _('No')
+            elif isinstance(value, (list, tuple)):
+                value = ','.join(value)
+            elif isinstance(value, dict) and loop:
+                self.__data_processing(value, loop=False)
+            if key in encrypted_field_set:
+                value = encrypt_value
+            dict_item[key] = value
+        return dict_item
+
+    def data_processing(self, before, after):
+        if before:
+            before = self.__data_processing(before)
+        if after:
+            after = self.__data_processing(after)
+        return before, after
+
+    def create_or_update_operate_log(
+            self, action, resource_type, resource=None,
+            force=False, log_id=None, before=None, after=None
+    ):
+        user = current_request.user if current_request else None
+        if not user or not user.is_authenticated:
+            return
+
+        remote_addr = get_request_ip(current_request)
+        resource_display = self.get_resource_display(resource)
+        before, after = self.data_processing(before, after)
+        if not force and not any([before, after]):
+            # Nothing changed, so there is no point in writing a log, unless forced to
+            return
+
+        data = {
+            'id': log_id, "user": str(user), 'action': action,
+            'resource_type': str(resource_type), 'resource': resource_display,
+            'remote_addr': remote_addr, 'before': before, 'after': after,
+            'org_id': get_current_org_id(),
+        }
+        with transaction.atomic():
+            if self.log_client.ping(timeout=1):
+                client = self.log_client
+            else:
+                logger.info('Switch default operate log storage save.')
+                client = get_operate_log_storage(default=True)
+
+            try:
+                client.save(**data)
+            except Exception as e:
+                error_msg = 'An error occurred saving OperateLog.' \
+                            'Error: %s, Data: %s' % (e, data)
+                logger.error(error_msg)
+
+
+op_handler = OperatorLogHandler()
+create_or_update_operate_log = op_handler.create_or_update_operate_log
+cache_instance_before_data = op_handler.cache_instance_before_data
+get_instance_current_with_cache_diff = op_handler.get_instance_current_with_cache_diff
+get_instance_dict_from_cache = op_handler.get_instance_dict_from_cache
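An illustration (not part of the commit) of the diff the handler stores; it assumes a configured JumpServer/Django environment, since instantiating the Singleton handler builds a storage client:

handler = OperatorLogHandler()
before, after = handler._look_for_two_dict_change(
    {'Name': 'web-1', 'IP': '10.0.0.1'},    # snapshot cached at pre_save
    {'Name': 'web-1', 'IP': '10.0.0.2'},    # model_to_dict() of the instance at post_save
)
# Unchanged keys are dropped: before == {'IP': '10.0.0.1'}, after == {'IP': '10.0.0.2'}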
@@ -0,0 +1,24 @@
+# Generated by Django 3.2.14 on 2022-10-11 09:45
+
+import common.db.encoder
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('audits', '0014_auto_20220505_1902'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='operatelog',
+            name='after',
+            field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
+        ),
+        migrations.AddField(
+            model_name='operatelog',
+            name='before',
+            field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
+        ),
+    ]
@@ -4,8 +4,9 @@ from django.db import models
 from django.db.models import Q
 from django.utils.translation import gettext, ugettext_lazy as _
 from django.utils import timezone
-from common.utils import lazyproperty
+
+from common.utils import lazyproperty
+from common.db.encoder import ModelJSONFieldEncoder
 from orgs.mixins.models import OrgModelMixin, Organization
 from orgs.utils import current_org

@@ -65,6 +66,8 @@ class OperateLog(OrgModelMixin):
     resource = models.CharField(max_length=128, verbose_name=_("Resource"))
     remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
     datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
+    before = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
+    after = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)

     def __str__(self):
         return "<{}> {} <{}>".format(self.user, self.action, self.resource)
@@ -78,6 +81,21 @@ class OperateLog(OrgModelMixin):
             self.org_id = Organization.ROOT_ID
         return super(OperateLog, self).save(*args, **kwargs)

+    @classmethod
+    def from_dict(cls, d):
+        self = cls()
+        for k, v in d.items():
+            setattr(self, k, v)
+        return self
+
+    @classmethod
+    def from_multi_dict(cls, l):
+        operate_logs = []
+        for d in l:
+            operate_log = cls.from_dict(d)
+            operate_logs.append(operate_log)
+        return operate_logs
+
     class Meta:
         verbose_name = _("Operate log")
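A short sketch of what the new from_multi_dict() hook is for: hydrating the plain dicts returned by the ES store back into unsaved OperateLog instances (the field values below are invented):

docs = [{
    'id': '9a7f2f7e-6f3f-4cf0-9d2a-52c9f1f4f9a1', 'user': 'admin', 'action': 'update',
    'resource_type': 'User', 'resource': 'alice', 'remote_addr': '127.0.0.1',
    'before': {'Name': 'alice'}, 'after': {'Name': 'Alice'}, 'org_id': '',
}]
logs = OperateLog.from_multi_dict(docs)
assert logs[0].user == 'admin' and logs[0].after == {'Name': 'Alice'}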
@@ -47,6 +47,12 @@ class UserLoginLogSerializer(serializers.ModelSerializer):
         }


+class OperateLogActionDetailSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = models.OperateLog
+        fields = ('before', 'after')
+
+
 class OperateLogSerializer(serializers.ModelSerializer):
     action_display = serializers.CharField(source='get_action_display', label=_('Action'))
@@ -1,9 +1,9 @@
 # -*- coding: utf-8 -*-
 #
-import time
+import uuid

 from django.db.models.signals import (
-    post_save, m2m_changed, pre_delete
+    post_save, m2m_changed, pre_delete, pre_save
 )
 from django.dispatch import receiver
 from django.conf import settings
@@ -16,24 +16,32 @@ from django.utils import translation
 from rest_framework.renderers import JSONRenderer
 from rest_framework.request import Request

-from assets.models import Asset, SystemUser
+from users.models import User
+from assets.models import Asset, SystemUser, CommandFilter
+from terminal.models import Session, Command
+from perms.models import AssetPermission, ApplicationPermission
+from rbac.models import Role
+
+from audits.utils import model_to_dict_for_operate_log as model_to_dict
+from audits.handler import (
+    get_instance_current_with_cache_diff, cache_instance_before_data,
+    create_or_update_operate_log, get_instance_dict_from_cache
+)
 from authentication.signals import post_auth_failed, post_auth_success
 from authentication.utils import check_different_city_login_if_need
 from jumpserver.utils import current_request
-from users.models import User
 from users.signals import post_user_change_password
-from terminal.models import Session, Command
-from .utils import write_login_log, create_operate_log
+from .utils import write_login_log
 from . import models, serializers
 from .models import OperateLog
-from orgs.utils import current_org
-from perms.models import AssetPermission, ApplicationPermission
+from .const import MODELS_NEED_RECORD
 from terminal.backends.command.serializers import SessionCommandSerializer
 from terminal.serializers import SessionSerializer
-from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR
+from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL
 from common.utils import get_request_ip, get_logger, get_syslogger
 from common.utils.encode import data_to_json


 logger = get_logger(__name__)
 sys_logger = get_syslogger(__name__)
 json_render = JSONRenderer()
@@ -62,70 +70,6 @@ class AuthBackendLabelMapping(LazyObject):


 AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()


-M2M_NEED_RECORD = {
-    User.groups.through._meta.object_name: (
-        _('User and Group'),
-        _('{User} JOINED {UserGroup}'),
-        _('{User} LEFT {UserGroup}')
-    ),
-    SystemUser.assets.through._meta.object_name: (
-        _('Asset and SystemUser'),
-        _('{Asset} ADD {SystemUser}'),
-        _('{Asset} REMOVE {SystemUser}')
-    ),
-    Asset.nodes.through._meta.object_name: (
-        _('Node and Asset'),
-        _('{Node} ADD {Asset}'),
-        _('{Node} REMOVE {Asset}')
-    ),
-    AssetPermission.users.through._meta.object_name: (
-        _('User asset permissions'),
-        _('{AssetPermission} ADD {User}'),
-        _('{AssetPermission} REMOVE {User}'),
-    ),
-    AssetPermission.user_groups.through._meta.object_name: (
-        _('User group asset permissions'),
-        _('{AssetPermission} ADD {UserGroup}'),
-        _('{AssetPermission} REMOVE {UserGroup}'),
-    ),
-    AssetPermission.assets.through._meta.object_name: (
-        _('Asset permission'),
-        _('{AssetPermission} ADD {Asset}'),
-        _('{AssetPermission} REMOVE {Asset}'),
-    ),
-    AssetPermission.nodes.through._meta.object_name: (
-        _('Node permission'),
-        _('{AssetPermission} ADD {Node}'),
-        _('{AssetPermission} REMOVE {Node}'),
-    ),
-    AssetPermission.system_users.through._meta.object_name: (
-        _('Asset permission and SystemUser'),
-        _('{AssetPermission} ADD {SystemUser}'),
-        _('{AssetPermission} REMOVE {SystemUser}'),
-    ),
-    ApplicationPermission.users.through._meta.object_name: (
-        _('User application permissions'),
-        _('{ApplicationPermission} ADD {User}'),
-        _('{ApplicationPermission} REMOVE {User}'),
-    ),
-    ApplicationPermission.user_groups.through._meta.object_name: (
-        _('User group application permissions'),
-        _('{ApplicationPermission} ADD {UserGroup}'),
-        _('{ApplicationPermission} REMOVE {UserGroup}'),
-    ),
-    ApplicationPermission.applications.through._meta.object_name: (
-        _('Application permission'),
-        _('{ApplicationPermission} ADD {Application}'),
-        _('{ApplicationPermission} REMOVE {Application}'),
-    ),
-    ApplicationPermission.system_users.through._meta.object_name: (
-        _('Application permission and SystemUser'),
-        _('{ApplicationPermission} ADD {SystemUser}'),
-        _('{ApplicationPermission} REMOVE {SystemUser}'),
-    ),
-}
-
-
 M2M_ACTION = {
     POST_ADD: OperateLog.ACTION_CREATE,
     POST_REMOVE: OperateLog.ACTION_DELETE,
@@ -137,60 +81,115 @@ M2M_ACTION = {
 def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
     if action not in M2M_ACTION:
         return
-
-    user = current_request.user if current_request else None
-    if not user or not user.is_authenticated:
+    if not instance:
         return

-    sender_name = sender._meta.object_name
-    if sender_name in M2M_NEED_RECORD:
-        org_id = current_org.id
-        remote_addr = get_request_ip(current_request)
-        user = str(user)
-        resource_type, resource_tmpl_add, resource_tmpl_remove = M2M_NEED_RECORD[sender_name]
-        action = M2M_ACTION[action]
-        if action == OperateLog.ACTION_CREATE:
-            resource_tmpl = resource_tmpl_add
-        elif action == OperateLog.ACTION_DELETE:
-            resource_tmpl = resource_tmpl_remove
-
-        to_create = []
-        objs = model.objects.filter(pk__in=pk_set)
-
-        instance_name = instance._meta.object_name
-        instance_value = str(instance)
-
-        model_name = model._meta.object_name
-
-        for obj in objs:
-            resource = resource_tmpl.format(**{
-                instance_name: instance_value,
-                model_name: str(obj)
-            })[:128]  # the `resource` field is only 128 characters long 😔
-
-            to_create.append(OperateLog(
-                user=user, action=action, resource_type=resource_type,
-                resource=resource, remote_addr=remote_addr, org_id=org_id
-            ))
-        OperateLog.objects.bulk_create(to_create)
+    resource_type = instance._meta.verbose_name
+    current_instance = model_to_dict(instance, include_model_fields=False)
+
+    instance_id = current_instance.get('id')
+    log_id, before_instance = get_instance_dict_from_cache(instance_id)
+
+    field_name = str(model._meta.verbose_name)
+    objs = model.objects.filter(pk__in=pk_set)
+    objs_display = [str(o) for o in objs]
+    action = M2M_ACTION[action]
+    changed_field = current_instance.get(field_name, [])
+
+    after, before, before_value = None, None, None
+    if action == OperateLog.ACTION_CREATE:
+        before_value = list(set(changed_field) - set(objs_display))
+    elif action == OperateLog.ACTION_DELETE:
+        before_value = list(
+            set(changed_field).symmetric_difference(set(objs_display))
+        )
+
+    if changed_field:
+        after = {field_name: changed_field}
+    if before_value:
+        before = {field_name: before_value}
+
+    if sorted(str(before)) == sorted(str(after)):
+        return
+
+    create_or_update_operate_log(
+        OperateLog.ACTION_UPDATE, resource_type,
+        resource=instance, log_id=log_id, before=before, after=after
+    )
+
+
+def signal_of_operate_log_whether_continue(sender, instance, created, update_fields=None):
+    condition = True
+    if not instance:
+        condition = False
+    if instance and getattr(instance, SKIP_SIGNAL, False):
+        condition = False
+    # Terminal 'create' events are generated by the system, don't record them
+    if instance._meta.object_name == 'Terminal' and created:
+        condition = False
+    # last_login is the last login time; it changes on every login
+    if instance._meta.object_name == 'User' and \
+            update_fields and 'last_login' in update_fields:
+        condition = False
+    # Not in the record whitelist, skip it
+    if sender._meta.object_name not in MODELS_NEED_RECORD:
+        condition = False
+    return condition
+
+
+@receiver(pre_save)
+def on_object_pre_create_or_update(sender, instance=None, raw=False, using=None, update_fields=None, **kwargs):
+    ok = signal_of_operate_log_whether_continue(
+        sender, instance, False, update_fields
+    )
+    if not ok:
+        return
+    instance_before_data = {'id': instance.id}
+    raw_instance = type(instance).objects.filter(pk=instance.id).first()
+    if raw_instance:
+        instance_before_data = model_to_dict(raw_instance)
+    operate_log_id = str(uuid.uuid4())
+    instance_before_data['operate_log_id'] = operate_log_id
+    setattr(instance, 'operate_log_id', operate_log_id)
+    cache_instance_before_data(instance_before_data)


 @receiver(post_save)
 def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
-    # last_login is the last login time; it changes on every login
-    if instance._meta.object_name == 'User' and \
-            update_fields and 'last_login' in update_fields:
+    ok = signal_of_operate_log_whether_continue(
+        sender, instance, created, update_fields
+    )
+    if not ok:
         return
+
+    log_id, before, after = None, None, None
     if created:
         action = models.OperateLog.ACTION_CREATE
+        after = model_to_dict(instance)
+        log_id = getattr(instance, 'operate_log_id', None)
     else:
         action = models.OperateLog.ACTION_UPDATE
-    create_operate_log(action, sender, instance)
+        current_instance = model_to_dict(instance)
+        log_id, before, after = get_instance_current_with_cache_diff(current_instance)
+
+    resource_type = sender._meta.verbose_name
+    create_or_update_operate_log(
+        action, resource_type, resource=instance,
+        log_id=log_id, before=before, after=after
+    )


 @receiver(pre_delete)
 def on_object_delete(sender, instance=None, **kwargs):
-    create_operate_log(models.OperateLog.ACTION_DELETE, sender, instance)
+    ok = signal_of_operate_log_whether_continue(sender, instance, False)
+    if not ok:
+        return
+
+    resource_type = sender._meta.verbose_name
+    create_or_update_operate_log(
+        models.OperateLog.ACTION_DELETE, resource_type,
+        resource=instance, before=model_to_dict(instance)
+    )


 @receiver(post_user_change_password, sender=User)
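Taken together, the new handlers form a pre_save/post_save round trip; a sketch of the flow for an update, assuming a running JumpServer app:

from users.models import User

user = User.objects.get(username='alice')
user.name = 'Alice Cooper'
# pre_save:  on_object_pre_create_or_update() caches the old field dict plus a fresh operate_log_id
# post_save: on_object_created_or_update() diffs the current dict against that cache and calls
#            create_or_update_operate_log() with the resulting before/after dicts
user.save()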
@@ -1,14 +1,15 @@
 import csv
 import codecs

-from django.http import HttpResponse
-from django.db import transaction
-from django.utils import translation
+from itertools import chain

-from audits.models import OperateLog
-from common.utils import validate_ip, get_ip_city, get_request_ip, get_logger
-from jumpserver.utils import current_request
-from .const import DEFAULT_CITY, MODELS_NEED_RECORD
+from django.http import HttpResponse
+from django.db import models
+
+from settings.serializers import SettingsSerializer
+from common.utils import validate_ip, get_ip_city, get_logger
+from common.db import fields
+from .const import DEFAULT_CITY


 logger = get_logger(__name__)
@@ -46,23 +47,60 @@ def write_login_log(*args, **kwargs):
     UserLoginLog.objects.create(**kwargs)


-def create_operate_log(action, sender, resource):
-    user = current_request.user if current_request else None
-    if not user or not user.is_authenticated:
-        return
-    model_name = sender._meta.object_name
-    if model_name not in MODELS_NEED_RECORD:
-        return
-    with translation.override('en'):
-        resource_type = sender._meta.verbose_name
-        remote_addr = get_request_ip(current_request)
-
-        data = {
-            "user": str(user), 'action': action, 'resource_type': resource_type,
-            'resource': str(resource), 'remote_addr': remote_addr,
-        }
-        with transaction.atomic():
-            try:
-                OperateLog.objects.create(**data)
-            except Exception as e:
-                logger.error("Create operate log error: {}".format(e))
+def get_resource_display(resource):
+    resource_display = str(resource)
+    setting_serializer = SettingsSerializer()
+    label = setting_serializer.get_field_label(resource_display)
+    if label is not None:
+        resource_display = label
+    return resource_display
+
+
+def model_to_dict_for_operate_log(
+        instance, include_model_fields=True, include_related_fields=True
+):
+    need_continue_fields = ['date_updated']
+    opts = instance._meta
+    data = {}
+    for f in chain(opts.concrete_fields, opts.private_fields):
+        if isinstance(f, (models.FileField, models.ImageField)):
+            continue
+
+        if getattr(f, 'attname', None) in need_continue_fields:
+            continue
+
+        value = getattr(instance, f.name) or getattr(instance, f.attname)
+        if not isinstance(value, bool) and not value:
+            continue
+
+        if getattr(f, 'primary_key', False):
+            f.verbose_name = 'id'
+        elif isinstance(f, (
+                fields.EncryptCharField, fields.EncryptTextField,
+                fields.EncryptJsonDictCharField, fields.EncryptJsonDictTextField
+        )) or getattr(f, 'attname', '') == 'password':
+            value = 'encrypt|%s' % value
+        elif isinstance(value, list):
+            value = [str(v) for v in value]
+
+        if include_model_fields or getattr(f, 'primary_key', False):
+            data[str(f.verbose_name)] = value
+
+    if include_related_fields:
+        for f in chain(opts.many_to_many, opts.related_objects):
+            value = []
+            if instance.pk is not None:
+                related_name = getattr(f, 'attname', '') or getattr(f, 'related_name', '')
+                if related_name:
+                    try:
+                        value = [str(i) for i in getattr(instance, related_name).all()]
+                    except:
+                        pass
+            if not value:
+                continue
+            try:
+                field_key = getattr(f, 'verbose_name', None) or f.related_model._meta.verbose_name
+                data[str(field_key)] = value
+            except:
+                pass
+    return data
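A usage sketch for the new helper (values invented): keys are the fields' verbose names, the primary key is renamed to 'id', empty values and date_updated are skipped, and encrypted fields are stored with an 'encrypt|' prefix:

from assets.models import Asset
from audits.utils import model_to_dict_for_operate_log

asset = Asset.objects.first()
data = model_to_dict_for_operate_log(asset, include_related_fields=False)
# e.g. {'id': asset.id, 'Hostname': 'web-1', 'IP': '10.0.0.1', 'Active': True}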
@@ -1,5 +1,6 @@
 # Generated by Django 2.1.7 on 2019-02-28 08:07

+import common.db.models
 from django.conf import settings
 from django.db import migrations, models
 import django.db.models.deletion
@@ -27,7 +28,7 @@ class Migration(migrations.Migration):
                 models.UUIDField(default=uuid.uuid4, editable=False,
                                  verbose_name='AccessKeySecret')),
                 ('user', models.ForeignKey(
-                    on_delete=django.db.models.deletion.CASCADE,
+                    on_delete=common.db.models.CASCADE_SIGNAL_SKIP,
                     related_name='access_keys',
                     to=settings.AUTH_USER_MODEL, verbose_name='User')),
             ],
@@ -1,8 +1,8 @@
 # Generated by Django 3.1.13 on 2021-12-27 02:59

+import common.db.models
 from django.conf import settings
 from django.db import migrations, models
-import django.db.models.deletion


 class Migration(migrations.Migration):
@@ -16,6 +16,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='ssotoken',
             name='user',
-            field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='User'),
+            field=models.ForeignKey(db_constraint=False, on_delete=common.db.models.CASCADE_SIGNAL_SKIP, to=settings.AUTH_USER_MODEL, verbose_name='User'),
         ),
     ]
@@ -16,10 +16,10 @@ class AccessKey(models.Model):
                           default=uuid.uuid4, editable=False)
     secret = models.UUIDField(verbose_name='AccessKeySecret',
                               default=uuid.uuid4, editable=False)
-    user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='User',
-                             on_delete=models.CASCADE, related_name='access_keys')
+    user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'),
+                             on_delete=models.CASCADE_SIGNAL_SKIP, related_name='access_keys')
     is_active = models.BooleanField(default=True, verbose_name=_('Active'))
-    date_created = models.DateTimeField(auto_now_add=True)
+    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))

     def get_id(self):
         return str(self.id)
@@ -51,7 +51,7 @@ class SSOToken(models.JMSBaseModel):
     """
     authkey = models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name=_('Token'))
     expired = models.BooleanField(default=False, verbose_name=_('Expired'))
-    user = models.ForeignKey('users.User', on_delete=models.CASCADE, verbose_name=_('User'), db_constraint=False)
+    user = models.ForeignKey('users.User', on_delete=models.CASCADE_SIGNAL_SKIP, verbose_name=_('User'), db_constraint=False)

     class Meta:
         verbose_name = _('SSO token')
@@ -15,3 +15,5 @@ POST_CLEAR = 'post_clear'

 POST_PREFIX = 'post'
 PRE_PREFIX = 'pre'
+
+SKIP_SIGNAL = 'skip_signal'
@@ -6,6 +6,7 @@ from django.utils.translation import ugettext_lazy as _
 from django.utils.encoding import force_text
 from django.core.validators import MinValueValidator, MaxValueValidator
 from common.utils import signer, crypto
+from common.local import add_encrypted_field_set


 __all__ = [
@@ -149,6 +150,10 @@ class EncryptMixin:
 class EncryptTextField(EncryptMixin, models.TextField):
     description = _("Encrypt field using Secret Key")

+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        add_encrypted_field_set(self.verbose_name)
+

 class EncryptCharField(EncryptMixin, models.CharField):
     @staticmethod
@@ -163,6 +168,7 @@ class EncryptCharField(EncryptMixin, models.CharField):
     def __init__(self, *args, **kwargs):
         self.change_max_length(kwargs)
         super().__init__(*args, **kwargs)
+        add_encrypted_field_set(self.verbose_name)

     def deconstruct(self):
         name, path, args, kwargs = super().deconstruct()
@@ -174,11 +180,15 @@ class EncryptCharField(EncryptMixin, models.CharField):


 class EncryptJsonDictTextField(EncryptMixin, JsonDictTextField):
-    pass
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        add_encrypted_field_set(self.verbose_name)


 class EncryptJsonDictCharField(EncryptMixin, JsonDictCharField):
-    pass
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        add_encrypted_field_set(self.verbose_name)


 class PortField(models.IntegerField):
@@ -19,6 +19,8 @@ from django.db.models import QuerySet
 from django.db.models.functions import Concat
 from django.utils.translation import ugettext_lazy as _

+from ..const.signals import SKIP_SIGNAL
+

 class Choice(str):
     def __new__(cls, value, label=''):  # `label` is not passed on `deepcopy`
@@ -124,6 +126,9 @@ class JMSModel(JMSBaseModel):
     class Meta:
         abstract = True

+    def __str__(self):
+        return str(self.id)
+

 def concated_display(name1, name2):
     return Concat(F(name1), Value('('), F(name2), Value(')'))
@@ -238,3 +243,14 @@ class MultiTableChildQueryset(QuerySet):
         self._batched_insert(objs, self.model._meta.local_fields, batch_size)

         return objs
+
+
+def CASCADE_SIGNAL_SKIP(collector, field, sub_objs, using):
+    # On cascade delete, don't save operate-log entries for the cascaded objects, to avoid confusing users
+    try:
+        for obj in sub_objs:
+            setattr(obj, SKIP_SIGNAL, True)
+    except:
+        pass
+
+    CASCADE(collector, field, sub_objs, using)
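A hypothetical model, for illustration only, showing the intended wiring of the new deleter (the real usages are the AccessKey and SSOToken changes below): objects removed by a cascade are flagged with SKIP_SIGNAL, so they do not each produce an operate-log entry:

from django.conf import settings
from django.db import models as dj_models

from common.db.models import CASCADE_SIGNAL_SKIP


class ApiToken(dj_models.Model):        # hypothetical model, not part of the commit
    user = dj_models.ForeignKey(
        settings.AUTH_USER_MODEL, on_delete=CASCADE_SIGNAL_SKIP,
        related_name='api_tokens', verbose_name='User',
    )

    class Meta:
        app_label = 'users'             # only so the sketch has an app to live in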
@@ -4,6 +4,7 @@
 from rest_framework import serializers

 from common.utils import decrypt_password
+from common.local import add_encrypted_field_set

 __all__ = [
     'ReadableHiddenField', 'EncryptedField'
@@ -32,6 +33,7 @@ class EncryptedField(serializers.CharField):
             write_only = True
         kwargs['write_only'] = write_only
         super().__init__(**kwargs)
+        add_encrypted_field_set(self.label)

     def to_internal_value(self, value):
         value = super().to_internal_value(value)
@@ -1,7 +1,13 @@
 from werkzeug.local import Local

 thread_local = Local()
+encrypted_field_set = set()


 def _find(attr):
     return getattr(thread_local, attr, None)
+
+
+def add_encrypted_field_set(label):
+    if label:
+        encrypted_field_set.add(str(label))
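How the registry is consumed, in brief (a sketch): every Encrypt*Field model field and EncryptedField serializer field registers its label here, and the operate-log handler masks any before/after key found in the set:

from common.local import add_encrypted_field_set, encrypted_field_set

add_encrypted_field_set('Password')          # what the field __init__ hooks above do
assert 'Password' in encrypted_field_set     # the handler then logs this key as '******'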
@@ -7,7 +7,7 @@ from rest_framework import permissions
 from rest_framework.request import Request

 from common.exceptions import UserConfirmRequired
-from audits.utils import create_operate_log
+from audits.handler import create_or_update_operate_log
 from audits.models import OperateLog

 __all__ = ["PermissionsMixin", "RecordViewLogMixin", "UserConfirmRequiredExceptionMixin"]
@@ -62,10 +62,18 @@ class RecordViewLogMixin:
     def list(self, request, *args, **kwargs):
         response = super().list(request, *args, **kwargs)
         resource = self.get_resource_display(request)
-        create_operate_log(self.ACTION, self.model, resource)
+        resource_type = self.model._meta.verbose_name
+        create_or_update_operate_log(
+            self.ACTION, resource_type, force=True,
+            resource=resource
+        )
         return response

     def retrieve(self, request, *args, **kwargs):
         response = super().retrieve(request, *args, **kwargs)
-        create_operate_log(self.ACTION, self.model, self.get_object())
+        resource_type = self.model._meta.verbose_name
+        create_or_update_operate_log(
+            self.ACTION, resource_type, force=True,
+            resource=self.get_object()
+        )
         return response
@ -0,0 +1,428 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
import datetime
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from functools import reduce, partial
|
||||||
|
from itertools import groupby
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
from django.db.models import QuerySet as DJQuerySet
|
||||||
|
from elasticsearch import Elasticsearch
|
||||||
|
from elasticsearch.helpers import bulk
|
||||||
|
from elasticsearch.exceptions import RequestError, NotFoundError
|
||||||
|
|
||||||
|
from common.utils.common import lazyproperty
|
||||||
|
from common.utils import get_logger
|
||||||
|
from common.utils.timezone import local_now_date_display
|
||||||
|
from common.exceptions import JMSException
|
||||||
|
|
||||||
|
|
||||||
|
logger = get_logger(__file__)
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidElasticsearch(JMSException):
|
||||||
|
default_code = 'invalid_elasticsearch'
|
||||||
|
default_detail = _('Invalid elasticsearch config')
|
||||||
|
|
||||||
|
|
||||||
|
class NotSupportElasticsearch8(JMSException):
|
||||||
|
default_code = 'not_support_elasticsearch8'
|
||||||
|
default_detail = _('Not Support Elasticsearch8')
|
||||||
|
|
||||||
|
|
||||||
|
class ES(object):
    def __init__(self, config, properties, keyword_fields, exact_fields=None, match_fields=None):
        self.config = config
        hosts = self.config.get('HOSTS')
        kwargs = self.config.get('OTHER', {})

        ignore_verify_certs = kwargs.pop('IGNORE_VERIFY_CERTS', False)
        if ignore_verify_certs:
            kwargs['verify_certs'] = None
        self.es = Elasticsearch(hosts=hosts, max_retries=0, **kwargs)
        self.index_prefix = self.config.get('INDEX') or 'jumpserver'
        self.is_index_by_date = bool(self.config.get('INDEX_BY_DATE', False))

        self.index = None
        self.query_index = None
        self.properties = properties
        self.exact_fields, self.match_fields, self.keyword_fields = set(), set(), set()

        if isinstance(keyword_fields, Iterable):
            self.keyword_fields.update(keyword_fields)
        if isinstance(exact_fields, Iterable):
            self.exact_fields.update(exact_fields)
        if isinstance(match_fields, Iterable):
            self.match_fields.update(match_fields)

        self.init_index()
        self.doc_type = self.config.get("DOC_TYPE") or '_doc'
        if self.is_new_index_type():
            self.doc_type = '_doc'
            self.exact_fields.update(self.keyword_fields)
        else:
            self.match_fields.update(self.keyword_fields)

    def init_index(self):
        if self.is_index_by_date:
            date = local_now_date_display()
            self.index = '%s-%s' % (self.index_prefix, date)
            self.query_index = '%s-alias' % self.index_prefix
        else:
            self.index = self.config.get("INDEX") or 'jumpserver'
            self.query_index = self.config.get("INDEX") or 'jumpserver'

    def is_new_index_type(self):
        if not self.ping(timeout=2):
            return False

        info = self.es.info()
        version = info['version']['number'].split('.')[0]

        if version == '8':
            raise NotSupportElasticsearch8

        try:
            # Fetch the index mapping; if the index is not defined yet, return directly
            data = self.es.indices.get_mapping(self.index)
        except NotFoundError:
            return False

        try:
            if version == '6':
                # Check whether the index already uses the new type (ES 6)
                properties = data[self.index]['mappings']['data']['properties']
            else:
                # Check whether the index already uses the new type (ES 7, default index type: _doc)
                properties = data[self.index]['mappings']['properties']

            for keyword in self.keyword_fields:
                if not properties[keyword]['type'] == 'keyword':
                    break
            else:
                return True
        except KeyError:
            return False

    def pre_use_check(self):
        if not self.ping(timeout=3):
            raise InvalidElasticsearch
        self._ensure_index_exists()

    def _ensure_index_exists(self):
        info = self.es.info()
        version = info['version']['number'].split('.')[0]
        if version == '6':
            mappings = {'mappings': {'data': {'properties': self.properties}}}
        else:
            mappings = {'mappings': {'properties': self.properties}}

        if self.is_index_by_date:
            mappings['aliases'] = {
                self.query_index: {}
            }

        try:
            self.es.indices.create(self.index, body=mappings)
            return
        except RequestError as e:
            if e.error == 'resource_already_exists_exception':
                logger.warning(e)
            else:
                logger.exception(e)

    def make_data(self, data):
        return []

    def save(self, **kwargs):
        data = self.make_data(kwargs)
        return self.es.index(index=self.index, doc_type=self.doc_type, body=data)

    def bulk_save(self, command_set, raise_on_error=True):
        actions = []
        for command in command_set:
            data = dict(
                _index=self.index,
                _type=self.doc_type,
                _source=self.make_data(command),
            )
            actions.append(data)
        return bulk(self.es, actions, index=self.index, raise_on_error=raise_on_error)

    def get(self, query: dict):
        item = None
        data = self.filter(query, size=1)
        if len(data) >= 1:
            item = data[0]
        return item

    def filter(self, query: dict, from_=None, size=None, sort=None):
        try:
            data = self._filter(query, from_, size, sort)
        except Exception as e:
            logger.error('ES filter error: {}'.format(e))
            data = []
        return data

    def _filter(self, query: dict, from_=None, size=None, sort=None):
        body = self.get_query_body(**query)

        data = self.es.search(
            index=self.query_index, doc_type=self.doc_type, body=body,
            from_=from_, size=size, sort=sort
        )
        source_data = []
        for item in data['hits']['hits']:
            if item:
                item['_source'].update({'es_id': item['_id']})
                source_data.append(item['_source'])

        return source_data

    def count(self, **query):
        try:
            body = self.get_query_body(**query)
            data = self.es.count(index=self.query_index, doc_type=self.doc_type, body=body)
            count = data["count"]
        except Exception as e:
            logger.error('ES count error: {}'.format(e))
            count = 0
        return count

    def __getattr__(self, item):
        return getattr(self.es, item)

    def all(self):
        """Return all data"""
        raise NotImplementedError("Not support")

    def ping(self, timeout=None):
        try:
            return self.es.ping(request_timeout=timeout)
        except Exception:
            return False

    @staticmethod
    def handler_time_field(data):
        datetime__gte = data.get('datetime__gte')
        datetime__lte = data.get('datetime__lte')
        datetime_range = {}

        if datetime__gte:
            if isinstance(datetime__gte, datetime.datetime):
                datetime__gte = datetime__gte.strftime('%Y-%m-%d %H:%M:%S')
            datetime_range['gte'] = datetime__gte
        if datetime__lte:
            if isinstance(datetime__lte, datetime.datetime):
                datetime__lte = datetime__lte.strftime('%Y-%m-%d %H:%M:%S')
            datetime_range['lte'] = datetime__lte
        return 'datetime', datetime_range

    def get_query_body(self, **kwargs):
        new_kwargs = {}
        for k, v in kwargs.items():
            if isinstance(v, UUID):
                v = str(v)
            if k == 'pk':
                k = 'id'
            new_kwargs[k] = v
        kwargs = new_kwargs

        index_in_field = 'id__in'
        exact_fields = self.exact_fields
        match_fields = self.match_fields

        match = {}
        exact = {}
        index = {}

        if index_in_field in kwargs:
            index['values'] = kwargs[index_in_field]

        for k, v in kwargs.items():
            if k in exact_fields:
                exact[k] = v
            elif k in match_fields:
                match[k] = v

        # Handle the time range
        time_field_name, time_range = self.handler_time_field(kwargs)

        # Handle the organization
        should = []
        org_id = match.get('org_id')

        real_default_org_id = '00000000-0000-0000-0000-000000000002'
        root_org_id = '00000000-0000-0000-0000-000000000000'

        if org_id == root_org_id:
            match.pop('org_id')
        elif org_id in (real_default_org_id, ''):
            match.pop('org_id')
            should.append({
                'bool': {
                    'must_not': [
                        {
                            'wildcard': {'org_id': '*'}
                        }
                    ]}
            })
            should.append({'match': {'org_id': real_default_org_id}})

        # Build the query body
        body = {
            'query': {
                'bool': {
                    'must': [
                        {'match': {k: v}} for k, v in match.items()
                    ],
                    'should': should,
                    'filter': [
                        {
                            'term': {k: v}
                        } for k, v in exact.items()
                    ] + [
                        {
                            'range': {
                                time_field_name: time_range
                            }
                        }
                    ] + [
                        {
                            'ids': {k: v}
                        } for k, v in index.items()
                    ]
                }
            },
        }
        return body


class QuerySet(DJQuerySet):
    default_days_ago = 7
    max_result_window = 10000

    def __init__(self, es_instance):
        self._method_calls = []
        self._slice = None  # (from_, size)
        self._storage = es_instance

        # Without this, fuzzy searching the command list raised an error
        super().__init__()

    @lazyproperty
    def _grouped_method_calls(self):
        _method_calls = {k: list(v) for k, v in groupby(self._method_calls, lambda x: x[0])}
        return _method_calls

    @lazyproperty
    def _filter_kwargs(self):
        _method_calls = self._grouped_method_calls
        filter_calls = _method_calls.get('filter')
        if not filter_calls:
            return {}
        names, multi_args, multi_kwargs = zip(*filter_calls)
        kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})

        striped_kwargs = {}
        for k, v in kwargs.items():
            k = k.replace('__exact', '')
            k = k.replace('__startswith', '')
            k = k.replace('__icontains', '')
            striped_kwargs[k] = v
        return striped_kwargs

    @lazyproperty
    def _sort(self):
        order_by = self._grouped_method_calls.get('order_by')
        if order_by:
            for call in reversed(order_by):
                fields = call[1]
                if fields:
                    field = fields[-1]

                    if field.startswith('-'):
                        direction = 'desc'
                    else:
                        direction = 'asc'
                    field = field.lstrip('-+')
                    sort = f'{field}:{direction}'
                    return sort

    def __execute(self):
        _filter_kwargs = self._filter_kwargs
        _sort = self._sort
        from_, size = self._slice or (None, None)
        data = self._storage.filter(_filter_kwargs, from_=from_, size=size, sort=_sort)
        return self.model.from_multi_dict(data)

    def __stage_method_call(self, item, *args, **kwargs):
        _clone = self.__clone()
        _clone._method_calls.append((item, args, kwargs))
        return _clone

    def __clone(self):
        uqs = QuerySet(self._storage)
        uqs._method_calls = self._method_calls.copy()
        uqs._slice = self._slice
        uqs.model = self.model
        return uqs

    def get(self, **kwargs):
        kwargs.update(self._filter_kwargs)
        return self._storage.get(kwargs)

    def count(self, limit_to_max_result_window=True):
        filter_kwargs = self._filter_kwargs
        count = self._storage.count(**filter_kwargs)
        if limit_to_max_result_window:
            count = min(count, self.max_result_window)
        return count

    def __getattribute__(self, item):
        if any((
            item.startswith('__'),
            item in QuerySet.__dict__,
        )):
            return object.__getattribute__(self, item)

        origin_attr = object.__getattribute__(self, item)
        if not inspect.ismethod(origin_attr):
            return origin_attr

        attr = partial(self.__stage_method_call, item)
        return attr

    def __getitem__(self, item):
        max_window = self.max_result_window
        if isinstance(item, slice):
            if self._slice is None:
                clone = self.__clone()
                from_ = item.start or 0
                if item.stop is None:
                    size = self.max_result_window - from_
                else:
                    size = item.stop - from_

                if from_ + size > max_window:
                    if from_ >= max_window:
                        from_ = max_window
                        size = 0
                    else:
                        size = max_window - from_
                clone._slice = (from_, size)
                return clone
        return self.__execute()[item]

    def __repr__(self):
        return self.__execute().__repr__()

    def __iter__(self):
        return iter(self.__execute())

    def __len__(self):
        return self.count()
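The two classes above form the new generic Elasticsearch backend that operation logs and command records plug into. A minimal sketch of how a concrete store could subclass ES and be consumed through the lazy QuerySet follows; the OperateLogStore name and its field set are illustrative assumptions, not part of this commit:

# Hypothetical subclass; only ES, QuerySet and the config keys are taken from the code above.
from common.plugins.es import ES, QuerySet as ESQuerySet

class OperateLogStore(ES):  # illustrative name
    def __init__(self, config):
        properties = {
            'id': {'type': 'keyword'},
            'user': {'type': 'keyword'},
            'action': {'type': 'keyword'},
            'datetime': {'type': 'date', 'format': 'yyyy-MM-dd HH:mm:ss'},
        }
        keyword_fields = {'id', 'user', 'action'}
        super().__init__(config, properties, keyword_fields)

    @staticmethod
    def make_data(data):
        # Build the document body actually written to the index
        return {k: data.get(k) for k in ('id', 'user', 'action', 'datetime')}

# Usage sketch: filter()/order_by()/slicing are staged and only hit ES on iteration.
# store = OperateLogStore(settings.OPERATE_LOG_ELASTICSEARCH_CONFIG)
# qs = ESQuerySet(store)
# qs.model = OperateLog
# recent = qs.filter(user='admin').order_by('-datetime')[:20]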
@@ -0,0 +1,6 @@
from django.conf import settings
from rest_framework.pagination import LimitOffsetPagination


class MaxLimitOffsetPagination(LimitOffsetPagination):
    max_limit = settings.MAX_LIMIT_PER_PAGE or 100
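The new paginator only overrides max_limit, so DRF's normal limit/offset handling applies. The sketch below (request path and settings bootstrapping are assumptions) shows the clamp in action:

# Illustrative only: LimitOffsetPagination.get_limit() clamps the requested
# limit to max_limit, i.e. settings.MAX_LIMIT_PER_PAGE (or 100 when unset).
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from jumpserver.rewriting.pagination import MaxLimitOffsetPagination

request = Request(APIRequestFactory().get('/api/v1/audits/operate-logs/', {'limit': 5000}))
print(MaxLimitOffsetPagination().get_limit(request))  # at most MAX_LIMIT_PER_PAGE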
@@ -178,5 +178,9 @@ HELP_SUPPORT_URL = CONFIG.HELP_SUPPORT_URL
 SESSION_RSA_PRIVATE_KEY_NAME = 'jms_private_key'
 SESSION_RSA_PUBLIC_KEY_NAME = 'jms_public_key'
 
+OPERATE_LOG_ELASTICSEARCH_CONFIG = CONFIG.OPERATE_LOG_ELASTICSEARCH_CONFIG
+
+MAX_LIMIT_PER_PAGE = CONFIG.MAX_LIMIT_PER_PAGE
+
 # Magnus DB Port
 MAGNUS_PORTS = CONFIG.MAGNUS_PORTS
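OPERATE_LOG_ELASTICSEARCH_CONFIG is consumed by the ES backend above, which reads HOSTS, INDEX, INDEX_BY_DATE, DOC_TYPE and OTHER. A hedged example of what a deployment might supply; the hosts, credentials and index name are placeholders:

# Illustrative config only; the keys mirror what ES.__init__/init_index read.
OPERATE_LOG_ELASTICSEARCH_CONFIG = {
    'HOSTS': ['http://elastic:changeme@localhost:9200'],  # placeholder
    'INDEX': 'jumpserver_operate_log',                    # placeholder prefix
    'INDEX_BY_DATE': True,   # roll one index per day, query through the "-alias"
    'DOC_TYPE': '_doc',
    'OTHER': {'IGNORE_VERIFY_CERTS': True},
}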
@@ -47,7 +47,7 @@ REST_FRAMEWORK = {
     'SEARCH_PARAM': "search",
     'DATETIME_FORMAT': '%Y/%m/%d %H:%M:%S %z',
     'DATETIME_INPUT_FORMATS': ['%Y/%m/%d %H:%M:%S %z', 'iso-8601', '%Y-%m-%d %H:%M:%S %z'],
-    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
+    'DEFAULT_PAGINATION_CLASS': 'jumpserver.rewriting.pagination.MaxLimitOffsetPagination',
     'EXCEPTION_HANDLER': 'common.drf.exc_handlers.common_exception_handler',
 }
File diff suppressed because it is too large
@@ -43,11 +43,11 @@ class Migration(migrations.Migration):
                 ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                 ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                 ('message_type', models.CharField(max_length=128)),
-                ('receive_backends', models.JSONField(default=list)),
+                ('receive_backends', models.JSONField(default=list, verbose_name='receive backend')),
                 ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_msg_subscription', to=settings.AUTH_USER_MODEL)),
             ],
             options={
-                'abstract': False,
+                'abstract': False, 'verbose_name': 'User message'
             },
         ),
         migrations.CreateModel(
@@ -64,7 +64,7 @@ class Migration(migrations.Migration):
                 ('users', models.ManyToManyField(related_name='system_msg_subscriptions', to=settings.AUTH_USER_MODEL)),
             ],
             options={
-                'abstract': False,
+                'abstract': False, 'verbose_name': 'System message'
             },
         ),
         migrations.CreateModel(
@@ -1,8 +1,8 @@
 # Generated by Django 3.1.12 on 2021-09-09 11:46
 
+import common.db.models
 from django.conf import settings
 from django.db import migrations, models
-import django.db.models.deletion
 
 
 def init_user_msg_subscription(apps, schema_editor):
@@ -49,7 +49,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='usermsgsubscription',
             name='user',
-            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_msg_subscription', to=settings.AUTH_USER_MODEL),
+            field=models.OneToOneField(on_delete=common.db.models.CASCADE_SIGNAL_SKIP, related_name='user_msg_subscription', to=settings.AUTH_USER_MODEL),
         ),
         migrations.RunPython(init_user_msg_subscription)
     ]
@@ -1,16 +1,23 @@
 from django.db import models
+from django.utils.translation import ugettext_lazy as _
 
-from common.db.models import JMSModel
+from common.db.models import JMSModel, CASCADE_SIGNAL_SKIP
 
 __all__ = ('SystemMsgSubscription', 'UserMsgSubscription')
 
 
 class UserMsgSubscription(JMSModel):
-    user = models.OneToOneField('users.User', related_name='user_msg_subscription', on_delete=models.CASCADE)
-    receive_backends = models.JSONField(default=list)
+    user = models.OneToOneField(
+        'users.User', related_name='user_msg_subscription', on_delete=CASCADE_SIGNAL_SKIP,
+        verbose_name=_('User')
+    )
+    receive_backends = models.JSONField(default=list, verbose_name=_('receive backend'))
+
+    class Meta:
+        verbose_name = _('User message')
 
     def __str__(self):
-        return f'{self.user} subscription: {self.receive_backends}'
+        return _('{} subscription').format(self.user)
 
 
 class SystemMsgSubscription(JMSModel):
@@ -21,11 +28,19 @@ class SystemMsgSubscription(JMSModel):
 
     message_type_label = ''
 
-    def __str__(self):
-        return f'{self.message_type}'
+    class Meta:
+        verbose_name = _('System message')
 
-    def __repr__(self):
-        return self.__str__()
+    def set_message_type_label(self):
+        # Called manually instead of being a property: only the UI update path
+        # needs it, so we avoid computing it on every instantiation
+        from ..notifications import system_msgs
+        msg_label = ''
+        for msg in system_msgs:
+            if msg.get('message_type') == self.message_type:
+                msg_label = msg.get('message_type_label', '')
+                break
+        self.message_type_label = msg_label
 
     @property
     def receivers(self):
@@ -47,3 +62,9 @@ class SystemMsgSubscription(JMSModel):
             receviers.append(recevier)
 
         return receviers
+
+    def __str__(self):
+        return f'{self.message_type_label}' or f'{self.message_type}'
+
+    def __repr__(self):
+        return self.__str__()
@@ -22,6 +22,10 @@ class SystemMsgSubscriptionSerializer(BulkModelSerializer):
             'receive_backends': {'required': True}
         }
 
+    def update(self, instance, validated_data):
+        instance.set_message_type_label()
+        return super().update(instance, validated_data)
+
 
 class SystemMsgSubscriptionByCategorySerializer(serializers.Serializer):
     category = serializers.CharField()
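For context, the new update() hook resolves the display label before delegating to the normal update. A small illustrative sketch (the object lookup is assumed):

# Illustrative only: set_message_type_label() looks up the registered system
# messages and stores the label on the instance, which __str__ now prefers.
sub = SystemMsgSubscription.objects.first()
sub.set_message_type_label()
print(sub)  # message_type_label if found, otherwise message_type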
@@ -6,11 +6,11 @@ from django.utils.translation import ugettext_lazy as _
 from django.db import models
 from django.db.models import Q
 from django.utils import timezone
-from orgs.mixins.models import OrgModelMixin
+
+from orgs.mixins.models import OrgModelMixin, OrgManager
 from common.db.models import UnionQuerySet, BitOperationChoice
 from common.utils import date_expired_default, lazyproperty
-from orgs.mixins.models import OrgManager
 
 
 __all__ = [
     'BasePermission', 'BasePermissionQuerySet', 'Action'
@@ -1,5 +1,6 @@
 # Generated by Django 3.1.13 on 2021-11-19 08:29
 
+import common.db.models
 from django.conf import settings
 import django.contrib.auth.models
 import django.contrib.contenttypes.models
@@ -84,7 +85,7 @@ class Migration(migrations.Migration):
                 ('scope', models.CharField(choices=[('system', 'System'), ('org', 'Organization')], default='system', max_length=128, verbose_name='Scope')),
                 ('org', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='role_bindings', to='orgs.organization', verbose_name='Organization')),
                 ('role', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='role_bindings', to='rbac.role', verbose_name='Role')),
-                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='role_bindings', to=settings.AUTH_USER_MODEL, verbose_name='User')),
+                ('user', models.ForeignKey(on_delete=common.db.models.CASCADE_SIGNAL_SKIP, related_name='role_bindings', to=settings.AUTH_USER_MODEL, verbose_name='User')),
             ],
             options={
                 'verbose_name': 'Role binding',
@@ -12,7 +12,7 @@ class Migration(migrations.Migration):
     operations = [
         migrations.AlterModelOptions(
             name='permission',
-            options={'verbose_name': 'Permission'},
+            options={'verbose_name': 'Permissions'},
         ),
         migrations.AlterModelOptions(
             name='role',
@@ -23,7 +23,7 @@ class Permission(DjangoPermission):
     """ Permission class """
     class Meta:
         proxy = True
-        verbose_name = _('Permission')
+        verbose_name = _('Permissions')
 
     @classmethod
     def to_perms(cls, queryset):
@@ -5,7 +5,7 @@ from django.conf import settings
 from django.core.exceptions import ValidationError
 from rest_framework.serializers import ValidationError
 
-from common.db.models import JMSModel
+from common.db.models import JMSModel, CASCADE_SIGNAL_SKIP
 from common.utils import lazyproperty
 from orgs.utils import current_org, tmp_to_root_org
 from .role import Role
@@ -38,7 +38,7 @@ class RoleBinding(JMSModel):
         verbose_name=_('Scope')
     )
     user = models.ForeignKey(
-        'users.User', related_name='role_bindings', on_delete=models.CASCADE, verbose_name=_('User')
+        'users.User', related_name='role_bindings', on_delete=CASCADE_SIGNAL_SKIP, verbose_name=_('User')
     )
     role = models.ForeignKey(
         Role, related_name='role_bindings', on_delete=models.CASCADE, verbose_name=_('Role')
@@ -56,7 +56,7 @@ class RoleBinding(JMSModel):
     ]
 
     def __str__(self):
-        display = '{user} & {role}'.format(user=self.user, role=self.role)
+        display = '{role} -> {user}'.format(user=self.user, role=self.role)
         if self.org:
             display += ' | {org}'.format(org=self.org)
         return display
@@ -21,8 +21,8 @@ class Migration(migrations.Migration):
                  verbose_name='Name')),
                 ('value', models.TextField(verbose_name='Value')),
                 ('category',
-                 models.CharField(default='default', max_length=128)),
-                ('encrypted', models.BooleanField(default=False)),
+                 models.CharField(default='default', max_length=128, verbose_name='Category')),
+                ('encrypted', models.BooleanField(default=False, verbose_name='Encrypted')),
                 ('enabled',
                  models.BooleanField(default=True, verbose_name='Enabled')),
                 ('comment', models.TextField(verbose_name='Comment')),
@@ -32,8 +32,8 @@ class SettingManager(models.Manager):
 class Setting(models.Model):
     name = models.CharField(max_length=128, unique=True, verbose_name=_("Name"))
     value = models.TextField(verbose_name=_("Value"), null=True, blank=True)
-    category = models.CharField(max_length=128, default="default")
-    encrypted = models.BooleanField(default=False)
+    category = models.CharField(max_length=128, default="default", verbose_name=_('Category'))
+    encrypted = models.BooleanField(default=False, verbose_name=_('Encrypted'))
     enabled = models.BooleanField(verbose_name=_("Enabled"), default=True)
     comment = models.TextField(verbose_name=_("Comment"))
@@ -7,6 +7,8 @@ __all__ = [
 
 
 class AuthSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('Basic'))
+
     AUTH_CAS = serializers.BooleanField(required=False, label=_('CAS Auth'))
     AUTH_OPENID = serializers.BooleanField(required=False, label=_('OPENID Auth'))
     AUTH_RADIUS = serializers.BooleanField(required=False, label=_('RADIUS Auth'))
@@ -7,6 +7,8 @@ __all__ = [
 
 
 class CASSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('CAS'))
+
     AUTH_CAS = serializers.BooleanField(required=False, label=_('Enable CAS Auth'))
     CAS_SERVER_URL = serializers.CharField(required=False, max_length=1024, label=_('Server url'))
     CAS_ROOT_PROXIED_AS = serializers.CharField(
@@ -7,6 +7,8 @@ __all__ = ['DingTalkSettingSerializer']
 
 
 class DingTalkSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('DingTalk'))
+
     DINGTALK_AGENTID = serializers.CharField(max_length=256, required=True, label='AgentId')
     DINGTALK_APPKEY = serializers.CharField(max_length=256, required=True, label='AppKey')
     DINGTALK_APPSECRET = EncryptedField(max_length=256, required=False, label='AppSecret')
@@ -7,6 +7,8 @@ __all__ = ['FeiShuSettingSerializer']
 
 
 class FeiShuSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('FeiShu'))
+
     FEISHU_APP_ID = serializers.CharField(max_length=256, required=True, label='App ID')
     FEISHU_APP_SECRET = EncryptedField(max_length=256, required=False, label='App Secret')
     AUTH_FEISHU = serializers.BooleanField(default=False, label=_('Enable FeiShu Auth'))
@@ -36,6 +36,7 @@ class LDAPUserSerializer(serializers.Serializer):
 
 class LDAPSettingSerializer(serializers.Serializer):
     # encrypt_fields is now determined by write_only
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('LDAP'))
 
     AUTH_LDAP_SERVER_URI = serializers.CharField(
         required=True, max_length=1024, label=_('LDAP server'),
@@ -16,6 +16,8 @@ class SettingImageField(serializers.ImageField):
 
 
 class OAuth2SettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('OAuth2'))
+
     AUTH_OAUTH2 = serializers.BooleanField(
         default=False, label=_('Enable OAuth2 Auth')
     )
@@ -9,6 +9,7 @@ __all__ = [
 
 
 class CommonSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('OIDC'))
     # OpenID common configuration parameters (version <= 1.5.8 or version >= 1.5.8)
     BASE_SITE_URL = serializers.CharField(
         required=False, allow_null=True, allow_blank=True,
@@ -10,6 +10,8 @@ __all__ = ['RadiusSettingSerializer']
 
 
 class RadiusSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('Radius'))
+
     AUTH_RADIUS = serializers.BooleanField(required=False, label=_('Enable Radius Auth'))
     RADIUS_SERVER = serializers.CharField(required=False, allow_blank=True, max_length=1024, label=_('Host'))
     RADIUS_PORT = serializers.IntegerField(required=False, label=_('Port'))
@@ -8,6 +8,8 @@ __all__ = [
 
 
 class SAML2SettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('SAML2'))
+
     AUTH_SAML2 = serializers.BooleanField(
         default=False, required=False, label=_('Enable SAML2 Auth')
     )
@@ -24,6 +24,8 @@ class SignTmplPairSerializer(serializers.Serializer):
 
 
 class BaseSMSSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = _('SMS')
+
     SMS_TEST_PHONE = serializers.CharField(
         max_length=256, required=False, validators=[PhoneValidator(), ],
         allow_blank=True, label=_('Test phone')
@@ -38,7 +40,7 @@ class BaseSMSSettingSerializer(serializers.Serializer):
 class AlibabaSMSSettingSerializer(BaseSMSSettingSerializer):
     ALIBABA_ACCESS_KEY_ID = serializers.CharField(max_length=256, required=True, label='AccessKeyId')
     ALIBABA_ACCESS_KEY_SECRET = EncryptedField(
-        max_length=256, required=False, label='AccessKeySecret',
+        max_length=256, required=False, label='access_key_secret',
     )
     ALIBABA_VERIFY_SIGN_NAME = serializers.CharField(max_length=256, required=True, label=_('Signature'))
     ALIBABA_VERIFY_TEMPLATE_CODE = serializers.CharField(max_length=256, required=True, label=_('Template code'))
@@ -7,6 +7,8 @@ __all__ = [
 
 
 class SSOSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('SSO'))
+
     AUTH_SSO = serializers.BooleanField(
         required=False, label=_('Enable SSO auth'),
         help_text=_("Other service can using SSO token login to JumpServer without password")
@@ -7,6 +7,8 @@ __all__ = ['WeComSettingSerializer']
 
 
 class WeComSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('WeCom'))
+
     WECOM_CORPID = serializers.CharField(max_length=256, required=True, label='corpid')
     WECOM_AGENTID = serializers.CharField(max_length=256, required=True, label='agentid')
     WECOM_SECRET = EncryptedField(max_length=256, required=False, label='secret')
@@ -24,6 +24,8 @@ class AnnouncementSerializer(serializers.Serializer):
 
 
 class BasicSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = _('Basic')
+
     SITE_URL = serializers.URLField(
         required=True, label=_("Site url"),
         help_text=_('eg: http://dev.jumpserver.org:8080')
@@ -5,6 +5,8 @@ __all__ = ['CleaningSerializer']
 
 
 class CleaningSerializer(serializers.Serializer):
+    PREFIX_TITLE = _('Period clean')
+
     LOGIN_LOG_KEEP_DAYS = serializers.IntegerField(
         min_value=1, max_value=9999,
         label=_("Login log keep days"), help_text=_("Unit: day")
@@ -16,6 +16,7 @@ class MailTestSerializer(serializers.Serializer):
 
 class EmailSettingSerializer(serializers.Serializer):
     # encrypt_fields is now determined by write_only
+    PREFIX_TITLE = _('Email')
 
     EMAIL_HOST = serializers.CharField(max_length=1024, required=True, label=_("SMTP host"))
     EMAIL_PORT = serializers.CharField(max_length=5, required=True, label=_("SMTP port"))
@@ -46,6 +47,8 @@ class EmailSettingSerializer(serializers.Serializer):
 
 
 class EmailContentSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = _('Email')
+
     EMAIL_CUSTOM_USER_CREATED_SUBJECT = serializers.CharField(
         max_length=1024, allow_blank=True, required=False,
         label=_('Create user email subject'),
@@ -3,6 +3,8 @@ from rest_framework import serializers
 
 
 class OtherSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = _('More...')
+
     EMAIL_SUFFIX = serializers.CharField(
         required=False, max_length=1024, label=_("Email suffix"),
         help_text=_('This is used by default if no email is returned during SSO authentication')
@@ -143,6 +143,8 @@ class SecurityAuthSerializer(serializers.Serializer):
 
 
 class SecuritySettingSerializer(SecurityPasswordRuleSerializer, SecurityAuthSerializer):
+    PREFIX_TITLE = _('Security')
+
     SECURITY_SERVICE_ACCOUNT_REGISTRATION = serializers.BooleanField(
         required=True, label=_('Enable terminal register'),
         help_text=_(
@@ -1,4 +1,6 @@
 # coding: utf-8
+from django.core.cache import cache
+from django.utils.translation import ugettext_lazy as _
 
 from .basic import BasicSettingSerializer
 from .other import OtherSettingSerializer
@@ -7,7 +9,8 @@ from .auth import (
     LDAPSettingSerializer, OIDCSettingSerializer, KeycloakSettingSerializer,
     CASSettingSerializer, RadiusSettingSerializer, FeiShuSettingSerializer,
     WeComSettingSerializer, DingTalkSettingSerializer, AlibabaSMSSettingSerializer,
-    TencentSMSSettingSerializer, CMPP2SMSSettingSerializer
+    TencentSMSSettingSerializer, CMPP2SMSSettingSerializer, AuthSettingSerializer,
+    SAML2SettingSerializer, OAuth2SettingSerializer, SSOSettingSerializer
 )
 from .terminal import TerminalSettingSerializer
 from .security import SecuritySettingSerializer
@@ -22,6 +25,7 @@ __all__ = [
 class SettingsSerializer(
     BasicSettingSerializer,
     LDAPSettingSerializer,
+    AuthSettingSerializer,
     TerminalSettingSerializer,
     SecuritySettingSerializer,
     WeComSettingSerializer,
@@ -31,13 +35,33 @@ class SettingsSerializer(
     EmailContentSettingSerializer,
     OtherSettingSerializer,
     OIDCSettingSerializer,
+    SAML2SettingSerializer,
+    OAuth2SettingSerializer,
     KeycloakSettingSerializer,
     CASSettingSerializer,
     RadiusSettingSerializer,
+    SSOSettingSerializer,
     CleaningSerializer,
     AlibabaSMSSettingSerializer,
     TencentSMSSettingSerializer,
     CMPP2SMSSettingSerializer,
 ):
+    CACHE_KEY = 'SETTING_FIELDS_MAPPING'
+
     # encrypt_fields is now determined by write_only
-    pass
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.fields_label_mapping = None
+
+    # The per-call cost is small, but cache it anyway so that heavy operate-log
+    # writes do not pay for rebuilding the mapping every time
+    def get_field_label(self, field_name):
+        if self.fields_label_mapping is None:
+            self.fields_label_mapping = {}
+            for subclass in SettingsSerializer.__bases__:
+                prefix = getattr(subclass, 'PREFIX_TITLE', _('Setting'))
+                fields = subclass().get_fields()
+                for name, item in fields.items():
+                    label = '[%s] %s' % (prefix, getattr(item, 'label', ''))
+                    self.fields_label_mapping[name] = label
+            cache.set(self.CACHE_KEY, self.fields_label_mapping, 3600 * 24)
+        return self.fields_label_mapping.get(field_name)
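A sketch of how the cached label mapping might be consumed when a settings change is written to the operation log; the surrounding log code and the example field values are assumptions, only SettingsSerializer and get_field_label come from the hunk above:

# Illustrative only.
serializer = SettingsSerializer()
changed = {'AUTH_CAS': True, 'EMAIL_HOST': 'smtp.example.org'}
for name, value in changed.items():
    # e.g. "[Authentication-CAS] Enable CAS Auth", depending on which base class defines the field
    print(serializer.get_field_label(name), value)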
@@ -3,6 +3,8 @@ from rest_framework import serializers
 
 
 class TerminalSettingSerializer(serializers.Serializer):
+    PREFIX_TITLE = _('Terminal')
+
     SORT_BY_CHOICES = (
         ('hostname', _('Hostname')),
         ('ip', _('IP'))
@ -1,109 +1,18 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
#
|
#
|
||||||
import pytz
|
import pytz
|
||||||
import inspect
|
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from functools import reduce, partial
|
|
||||||
from itertools import groupby
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
from django.utils.translation import gettext_lazy as _
|
|
||||||
from django.db.models import QuerySet as DJQuerySet
|
|
||||||
from elasticsearch import Elasticsearch
|
|
||||||
from elasticsearch.helpers import bulk
|
|
||||||
from elasticsearch.exceptions import RequestError, NotFoundError
|
|
||||||
|
|
||||||
from common.utils.common import lazyproperty
|
|
||||||
from common.utils import get_logger
|
from common.utils import get_logger
|
||||||
from common.utils.timezone import local_now_date_display, utc_now
|
from common.plugins.es import ES
|
||||||
from common.exceptions import JMSException
|
|
||||||
from terminal.models import Command
|
|
||||||
|
|
||||||
logger = get_logger(__file__)
|
logger = get_logger(__file__)
|
||||||
|
|
||||||
|
|
||||||
class InvalidElasticsearch(JMSException):
|
class CommandStore(ES):
|
||||||
default_code = 'invalid_elasticsearch'
|
|
||||||
default_detail = _('Invalid elasticsearch config')
|
|
||||||
|
|
||||||
|
|
||||||
class NotSupportElasticsearch8(JMSException):
|
|
||||||
default_code = 'not_support_elasticsearch8'
|
|
||||||
default_detail = _('Not Support Elasticsearch8')
|
|
||||||
|
|
||||||
|
|
||||||
class CommandStore(object):
|
|
||||||
def __init__(self, config):
|
def __init__(self, config):
|
||||||
self.doc_type = config.get("DOC_TYPE") or '_doc'
|
|
||||||
self.index_prefix = config.get('INDEX') or 'jumpserver'
|
|
||||||
self.is_index_by_date = bool(config.get('INDEX_BY_DATE'))
|
|
||||||
self.exact_fields = {}
|
|
||||||
self.match_fields = {}
|
|
||||||
hosts = config.get("HOSTS")
|
|
||||||
kwargs = config.get("OTHER", {})
|
|
||||||
|
|
||||||
ignore_verify_certs = kwargs.pop('IGNORE_VERIFY_CERTS', False)
|
|
||||||
if ignore_verify_certs:
|
|
||||||
kwargs['verify_certs'] = None
|
|
||||||
self.es = Elasticsearch(hosts=hosts, max_retries=0, **kwargs)
|
|
||||||
|
|
||||||
self.exact_fields = set()
|
|
||||||
self.match_fields = {'input', 'risk_level', 'user', 'asset', 'system_user'}
|
|
||||||
may_exact_fields = {'session', 'org_id'}
|
|
||||||
|
|
||||||
if self.is_new_index_type():
|
|
||||||
self.exact_fields.update(may_exact_fields)
|
|
||||||
self.doc_type = '_doc'
|
|
||||||
else:
|
|
||||||
self.match_fields.update(may_exact_fields)
|
|
||||||
|
|
||||||
self.init_index(config)
|
|
||||||
|
|
||||||
def init_index(self, config):
|
|
||||||
if self.is_index_by_date:
|
|
||||||
date = local_now_date_display()
|
|
||||||
self.index = '%s-%s' % (self.index_prefix, date)
|
|
||||||
self.query_index = '%s-alias' % self.index_prefix
|
|
||||||
else:
|
|
||||||
self.index = config.get("INDEX") or 'jumpserver'
|
|
||||||
self.query_index = config.get("INDEX") or 'jumpserver'
|
|
||||||
|
|
||||||
def is_new_index_type(self):
|
|
||||||
if not self.ping(timeout=3):
|
|
||||||
return False
|
|
||||||
|
|
||||||
info = self.es.info()
|
|
||||||
version = info['version']['number'].split('.')[0]
|
|
||||||
|
|
||||||
if version == '8':
|
|
||||||
raise NotSupportElasticsearch8
|
|
||||||
|
|
||||||
try:
|
|
||||||
# 获取索引信息,如果没有定义,直接返回
|
|
||||||
data = self.es.indices.get_mapping(self.index)
|
|
||||||
except NotFoundError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
if version == '6':
|
|
||||||
# 检测索引是不是新的类型 es6
|
|
||||||
properties = data[self.index]['mappings']['data']['properties']
|
|
||||||
else:
|
|
||||||
# 检测索引是不是新的类型 es7 default index type: _doc
|
|
||||||
properties = data[self.index]['mappings']['properties']
|
|
||||||
if properties['session']['type'] == 'keyword' \
|
|
||||||
and properties['org_id']['type'] == 'keyword':
|
|
||||||
return True
|
|
||||||
except KeyError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def pre_use_check(self):
|
|
||||||
if not self.ping(timeout=3):
|
|
||||||
raise InvalidElasticsearch
|
|
||||||
self._ensure_index_exists()
|
|
||||||
|
|
||||||
def _ensure_index_exists(self):
|
|
||||||
properties = {
|
properties = {
|
||||||
"session": {
|
"session": {
|
||||||
"type": "keyword"
|
"type": "keyword"
|
||||||
|
@ -118,25 +27,11 @@ class CommandStore(object):
|
||||||
"type": "long"
|
"type": "long"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
info = self.es.info()
|
exact_fields = {}
|
||||||
version = info['version']['number'].split('.')[0]
|
match_fields = {'input', 'risk_level', 'user', 'asset', 'system_user'}
|
||||||
if version == '6':
|
keyword_fields = {'session', 'org_id'}
|
||||||
mappings = {'mappings': {'data': {'properties': properties}}}
|
|
||||||
else:
|
|
||||||
mappings = {'mappings': {'properties': properties}}
|
|
||||||
|
|
||||||
if self.is_index_by_date:
|
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
|
||||||
mappings['aliases'] = {
|
|
||||||
self.query_index: {}
|
|
||||||
}
|
|
||||||
try:
|
|
||||||
self.es.indices.create(self.index, body=mappings)
|
|
||||||
return
|
|
||||||
except RequestError as e:
|
|
||||||
if e.error == 'resource_already_exists_exception':
|
|
||||||
logger.warning(e)
|
|
||||||
else:
|
|
||||||
logger.exception(e)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def make_data(command):
|
def make_data(command):
|
||||||
|
@ -150,274 +45,14 @@ class CommandStore(object):
|
||||||
data["date"] = datetime.fromtimestamp(command['timestamp'], tz=pytz.UTC)
|
data["date"] = datetime.fromtimestamp(command['timestamp'], tz=pytz.UTC)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def bulk_save(self, command_set, raise_on_error=True):
|
@staticmethod
|
||||||
actions = []
|
def handler_time_field(data):
|
||||||
for command in command_set:
|
timestamp__gte = data.get('timestamp__gte')
|
||||||
data = dict(
|
timestamp__lte = data.get('timestamp__lte')
|
||||||
_index=self.index,
|
|
||||||
_type=self.doc_type,
|
|
||||||
_source=self.make_data(command),
|
|
||||||
)
|
|
||||||
actions.append(data)
|
|
||||||
return bulk(self.es, actions, index=self.index, raise_on_error=raise_on_error)
|
|
||||||
|
|
||||||
def save(self, command):
|
|
||||||
"""
|
|
||||||
保存命令到数据库
|
|
||||||
"""
|
|
||||||
data = self.make_data(command)
|
|
||||||
return self.es.index(index=self.index, doc_type=self.doc_type, body=data)
|
|
||||||
|
|
||||||
def filter(self, query: dict, from_=None, size=None, sort=None):
|
|
||||||
try:
|
|
||||||
data = self._filter(query, from_, size, sort)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error('ES filter error: {}'.format(e))
|
|
||||||
data = []
|
|
||||||
return data
|
|
||||||
|
|
||||||
def _filter(self, query: dict, from_=None, size=None, sort=None):
|
|
||||||
body = self.get_query_body(**query)
|
|
||||||
|
|
||||||
data = self.es.search(
|
|
||||||
index=self.query_index, doc_type=self.doc_type, body=body, from_=from_, size=size,
|
|
||||||
sort=sort
|
|
||||||
)
|
|
||||||
source_data = []
|
|
||||||
for item in data['hits']['hits']:
|
|
||||||
if item:
|
|
||||||
item['_source'].update({'id': item['_id']})
|
|
||||||
source_data.append(item['_source'])
|
|
||||||
|
|
||||||
return Command.from_multi_dict(source_data)
|
|
||||||
|
|
||||||
def count(self, **query):
|
|
||||||
try:
|
|
||||||
body = self.get_query_body(**query)
|
|
||||||
data = self.es.count(index=self.query_index, doc_type=self.doc_type, body=body)
|
|
||||||
count = data["count"]
|
|
||||||
except Exception as e:
|
|
||||||
logger.error('ES count error: {}'.format(e))
|
|
||||||
count = 0
|
|
||||||
return count
|
|
||||||
|
|
||||||
def __getattr__(self, item):
|
|
||||||
return getattr(self.es, item)
|
|
||||||
|
|
||||||
def all(self):
|
|
||||||
"""返回所有数据"""
|
|
||||||
raise NotImplementedError("Not support")
|
|
||||||
|
|
||||||
def ping(self, timeout=None):
|
|
||||||
try:
|
|
||||||
return self.es.ping(request_timeout=timeout)
|
|
||||||
except Exception:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_query_body(self, **kwargs):
|
|
||||||
new_kwargs = {}
|
|
||||||
for k, v in kwargs.items():
|
|
||||||
new_kwargs[k] = str(v) if isinstance(v, UUID) else v
|
|
||||||
kwargs = new_kwargs
|
|
||||||
|
|
||||||
index_in_field = 'id__in'
|
|
||||||
exact_fields = self.exact_fields
|
|
||||||
match_fields = self.match_fields
|
|
||||||
|
|
||||||
match = {}
|
|
||||||
exact = {}
|
|
||||||
index = {}
|
|
||||||
|
|
||||||
if index_in_field in kwargs:
|
|
||||||
index['values'] = kwargs[index_in_field]
|
|
||||||
|
|
||||||
for k, v in kwargs.items():
|
|
||||||
if k in exact_fields:
|
|
||||||
exact[k] = v
|
|
||||||
elif k in match_fields:
|
|
||||||
match[k] = v
|
|
||||||
|
|
||||||
# 处理时间
|
|
||||||
timestamp__gte = kwargs.get('timestamp__gte')
|
|
||||||
timestamp__lte = kwargs.get('timestamp__lte')
|
|
||||||
timestamp_range = {}
|
timestamp_range = {}
|
||||||
|
|
||||||
if timestamp__gte:
|
if timestamp__gte:
|
||||||
timestamp_range['gte'] = timestamp__gte
|
timestamp_range['gte'] = timestamp__gte
|
||||||
if timestamp__lte:
|
if timestamp__lte:
|
||||||
timestamp_range['lte'] = timestamp__lte
|
timestamp_range['lte'] = timestamp__lte
|
||||||
|
return 'timestamp', timestamp_range
|
||||||
# 处理组织
|
|
||||||
should = []
|
|
||||||
org_id = match.get('org_id')
|
|
||||||
|
|
||||||
real_default_org_id = '00000000-0000-0000-0000-000000000002'
|
|
||||||
root_org_id = '00000000-0000-0000-0000-000000000000'
|
|
||||||
|
|
||||||
if org_id == root_org_id:
|
|
||||||
match.pop('org_id')
|
|
||||||
elif org_id in (real_default_org_id, ''):
|
|
||||||
match.pop('org_id')
|
|
||||||
should.append({
|
|
||||||
'bool': {
|
|
||||||
'must_not': [
|
|
||||||
{
|
|
||||||
'wildcard': {'org_id': '*'}
|
|
||||||
}
|
|
||||||
]}
|
|
||||||
})
|
|
||||||
should.append({'match': {'org_id': real_default_org_id}})
|
|
||||||
|
|
||||||
# 构建 body
|
|
||||||
body = {
|
|
||||||
'query': {
|
|
||||||
'bool': {
|
|
||||||
'must': [
|
|
||||||
{'match': {k: v}} for k, v in match.items()
|
|
||||||
],
|
|
||||||
'should': should,
|
|
||||||
'filter': [
|
|
||||||
{
|
|
||||||
'term': {k: v}
|
|
||||||
} for k, v in exact.items()
|
|
||||||
] + [
|
|
||||||
{
|
|
||||||
'range': {
|
|
||||||
'timestamp': timestamp_range
|
|
||||||
}
|
|
||||||
}
|
|
||||||
] + [
|
|
||||||
{
|
|
||||||
'ids': {k: v}
|
|
||||||
} for k, v in index.items()
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
return body
|
|
||||||
|
|
||||||
|
|
||||||
class QuerySet(DJQuerySet):
|
|
||||||
_method_calls = None
|
|
||||||
_storage = None
|
|
||||||
_command_store_config = None
|
|
||||||
_slice = None # (from_, size)
|
|
||||||
default_days_ago = 5
|
|
||||||
max_result_window = 10000
|
|
||||||
|
|
||||||
def __init__(self, command_store_config):
|
|
||||||
self._method_calls = []
|
|
||||||
self._command_store_config = command_store_config
|
|
||||||
self._storage = CommandStore(command_store_config)
|
|
||||||
|
|
||||||
# 命令列表模糊搜索时报错
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
@lazyproperty
|
|
||||||
def _grouped_method_calls(self):
|
|
||||||
_method_calls = {k: list(v) for k, v in groupby(self._method_calls, lambda x: x[0])}
|
|
||||||
return _method_calls
|
|
||||||
|
|
||||||
@lazyproperty
|
|
||||||
def _filter_kwargs(self):
|
|
||||||
_method_calls = self._grouped_method_calls
|
|
||||||
filter_calls = _method_calls.get('filter')
|
|
||||||
if not filter_calls:
|
|
||||||
return {}
|
|
||||||
names, multi_args, multi_kwargs = zip(*filter_calls)
|
|
||||||
kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})
|
|
||||||
|
|
||||||
striped_kwargs = {}
|
|
||||||
for k, v in kwargs.items():
|
|
||||||
k = k.replace('__exact', '')
|
|
||||||
k = k.replace('__startswith', '')
|
|
||||||
k = k.replace('__icontains', '')
|
|
||||||
striped_kwargs[k] = v
|
|
||||||
return striped_kwargs
|
|
||||||
|
|
||||||
@lazyproperty
|
|
||||||
def _sort(self):
|
|
||||||
order_by = self._grouped_method_calls.get('order_by')
|
|
||||||
if order_by:
|
|
||||||
for call in reversed(order_by):
|
|
||||||
fields = call[1]
|
|
||||||
if fields:
|
|
||||||
field = fields[-1]
|
|
||||||
|
|
||||||
if field.startswith('-'):
|
|
||||||
direction = 'desc'
|
|
||||||
else:
|
|
||||||
direction = 'asc'
|
|
||||||
field = field.lstrip('-+')
|
|
||||||
sort = f'{field}:{direction}'
|
|
||||||
return sort
|
|
||||||
|
|
||||||
    def __execute(self):
        _filter_kwargs = self._filter_kwargs
        _sort = self._sort
        from_, size = self._slice or (None, None)
        data = self._storage.filter(_filter_kwargs, from_=from_, size=size, sort=_sort)
        return data

    def __stage_method_call(self, item, *args, **kwargs):
        _clone = self.__clone()
        _clone._method_calls.append((item, args, kwargs))
        return _clone

    def __clone(self):
        uqs = QuerySet(self._command_store_config)
        uqs._method_calls = self._method_calls.copy()
        uqs._slice = self._slice
        uqs.model = self.model
        return uqs

    def count(self, limit_to_max_result_window=True):
        filter_kwargs = self._filter_kwargs
        count = self._storage.count(**filter_kwargs)
        if limit_to_max_result_window:
            count = min(count, self.max_result_window)
        return count

    def __getattribute__(self, item):
        if any((
            item.startswith('__'),
            item in QuerySet.__dict__,
        )):
            return object.__getattribute__(self, item)

        origin_attr = object.__getattribute__(self, item)
        if not inspect.ismethod(origin_attr):
            return origin_attr

        attr = partial(self.__stage_method_call, item)
        return attr

    def __getitem__(self, item):
        max_window = self.max_result_window
        if isinstance(item, slice):
            if self._slice is None:
                clone = self.__clone()
                from_ = item.start or 0
                if item.stop is None:
                    size = self.max_result_window - from_
                else:
                    size = item.stop - from_

                if from_ + size > max_window:
                    if from_ >= max_window:
                        from_ = max_window
                        size = 0
                    else:
                        size = max_window - from_
                clone._slice = (from_, size)
                return clone
        return self.__execute()[item]

    def __repr__(self):
        return self.__execute().__repr__()

    def __iter__(self):
        return iter(self.__execute())

    def __len__(self):
        return self.count()

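To make the laziness above concrete, a usage sketch: filter(), order_by() and slicing only stage method calls on clones; Elasticsearch is queried when the result is iterated, indexed, or counted. The config keys and field names here are assumptions for illustration only:

# Illustrative only; config keys and field names are assumptions, not from the diff.
config = {'HOSTS': ['http://127.0.0.1:9200'], 'INDEX': 'jumpserver', 'DOC_TYPE': '_doc'}
qs = QuerySet(config)                              # no request yet
qs = qs.filter(input__icontains='rm')              # staged on a clone, lookup suffix stripped later
qs = qs.order_by('-timestamp')                     # staged on a clone, becomes sort='timestamp:desc'
page = qs[0:15]                                    # records (from_, size); still no request
commands = list(page)                              # Elasticsearch is queried here
total = qs.count()                                 # capped at max_result_window (10000)
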
@@ -10,6 +10,7 @@ from django.db import models
 from django.utils.translation import ugettext_lazy as _
 from django.conf import settings
 from common.mixins import CommonModelMixin
+from common.plugins.es import QuerySet as ESQuerySet
 from common.utils import get_logger
 from common.db.fields import EncryptJsonDictTextField
 from common.utils.timezone import local_now_date_display

@@ -117,7 +118,8 @@ class CommandStorage(CommonStorageModelMixin, CommonModelMixin):

         if self.type in TYPE_ENGINE_MAPPING:
             engine_mod = import_module(TYPE_ENGINE_MAPPING[self.type])
-            qs = engine_mod.QuerySet(self.config)
+            store = engine_mod.CommandStore(self.config)
+            qs = ESQuerySet(store)
             qs.model = Command
             return qs

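In effect, the storage engine module now only supplies the low-level CommandStore, while the shared ESQuerySet from common.plugins.es (reconstructed above) provides the Django-QuerySet-like facade. A minimal sketch of the resulting flow; the mapping key and attribute names are assumptions:

# Sketch only; the 'es' key and surrounding names are assumptions.
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
store = engine_mod.CommandStore(storage.config)   # raw Elasticsearch access
qs = ESQuerySet(store)                            # lazy facade shared by ES-backed storages
qs.model = Command                                # hits are hydrated into Command instances
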
@@ -6,6 +6,7 @@ from django.utils.translation import ugettext_lazy as _
 from django.conf import settings

 from common.utils import get_logger
+from common.const.signals import SKIP_SIGNAL
 from users.models import User
 from orgs.utils import tmp_to_root_org
 from .status import Status

@@ -107,8 +108,8 @@ class Terminal(StorageMixin, TerminalStatusMixin, models.Model):
     http_port = models.IntegerField(verbose_name=_('HTTP Port'), default=5000)
     command_storage = models.CharField(max_length=128, verbose_name=_("Command storage"), default='default')
     replay_storage = models.CharField(max_length=128, verbose_name=_("Replay storage"), default='default')
-    user = models.OneToOneField(User, related_name='terminal', verbose_name='Application User', null=True, on_delete=models.CASCADE)
-    is_accepted = models.BooleanField(default=False, verbose_name='Is Accepted')
+    user = models.OneToOneField(User, related_name='terminal', verbose_name=_('Application User'), null=True, on_delete=models.CASCADE)
+    is_accepted = models.BooleanField(default=False, verbose_name=_('Is Accepted'))
     is_deleted = models.BooleanField(default=False)
     date_created = models.DateTimeField(auto_now_add=True)
     comment = models.TextField(blank=True, verbose_name=_('Comment'))

@@ -159,6 +160,7 @@ class Terminal(StorageMixin, TerminalStatusMixin, models.Model):

     def delete(self, using=None, keep_parents=False):
         if self.user:
+            setattr(self.user, SKIP_SIGNAL, True)
             self.user.delete()
             self.user = None
         self.is_deleted = True

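The SKIP_SIGNAL flag set in delete() above is a marker that downstream signal receivers can check, so that removing a terminal's service account does not generate spurious operate-log entries. A hypothetical receiver illustrating the pattern (the receiver itself is not part of this diff, and record_operate_log is a placeholder name):

# Hypothetical handler; everything except SKIP_SIGNAL and User is illustrative.
from django.db.models.signals import post_delete
from django.dispatch import receiver

@receiver(post_delete, sender=User)
def on_user_deleted(sender, instance, **kwargs):
    if getattr(instance, SKIP_SIGNAL, False):
        return  # deletion came from Terminal.delete(); skip operate-log recording
    record_operate_log(instance)  # placeholder for the real logging call
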
@@ -70,6 +70,9 @@ class CommandAlertMessage(CommandAlertMixin, SystemMessage):
     def __init__(self, command):
         self.command = command

+    def __str__(self):
+        return str(self.message_type_label)
+
     @classmethod
     def gen_test_msg(cls):
         command = Command.objects.first().to_dict()

@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='user',
             name='_otp_secret_key',
-            field=common.db.fields.EncryptCharField(blank=True, max_length=128, null=True),
+            field=common.db.fields.EncryptCharField(blank=True, max_length=128, null=True, verbose_name='OTP secret key'),
         ),
         migrations.AlterField(
             model_name='user',

@@ -1,8 +1,8 @@
 # Generated by Django 3.1 on 2021-04-27 12:43

+import common.db.models
 from django.conf import settings
 from django.db import migrations, models
-import django.db.models.deletion
 import uuid


@@ -19,7 +19,7 @@ class Migration(migrations.Migration):
                 ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                 ('password', models.CharField(max_length=128)),
                 ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
-                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history_passwords', to=settings.AUTH_USER_MODEL, verbose_name='User')),
+                ('user', models.ForeignKey(on_delete=common.db.models.CASCADE_SIGNAL_SKIP, related_name='history_passwords', to=settings.AUTH_USER_MODEL, verbose_name='User')),
             ],
         ),
     ]

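CASCADE_SIGNAL_SKIP is referenced here but not defined in this diff. Judging by its name and by how SKIP_SIGNAL is used in Terminal.delete() above, it plausibly mirrors Django's CASCADE while flagging the cascaded objects so their delete-signal receivers stay quiet. The sketch below is an assumption, not the actual implementation:

# Assumed shape of common.db.models.CASCADE_SIGNAL_SKIP; not taken from this diff.
from django.db.models import CASCADE

def CASCADE_SIGNAL_SKIP(collector, field, sub_objs, using):
    # Mark cascaded objects so delete-signal receivers can ignore them,
    # then fall back to Django's normal CASCADE behavior.
    for obj in sub_objs:
        setattr(obj, 'skip_signal', True)   # attribute name is an assumption
    CASCADE(collector, field, sub_objs, using)
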
@@ -20,7 +20,7 @@ from django.shortcuts import reverse
 from orgs.utils import current_org
 from orgs.models import Organization
 from rbac.const import Scope
-from common.db import fields
+from common.db import fields, models as jms_models
 from common.utils import (
     date_expired_default, get_logger, lazyproperty, random_string, bulk_create_with_signal
 )

@@ -691,7 +691,9 @@ class User(AuthMixin, TokenMixin, RoleMixin, MFAMixin, AbstractUser):
     mfa_level = models.SmallIntegerField(
         default=0, choices=MFAMixin.MFA_LEVEL_CHOICES, verbose_name=_('MFA')
     )
-    otp_secret_key = fields.EncryptCharField(max_length=128, blank=True, null=True)
+    otp_secret_key = fields.EncryptCharField(
+        max_length=128, blank=True, null=True, verbose_name=_('OTP secret key')
+    )
     # Todo: Auto generate key, let user download
     private_key = fields.EncryptTextField(
         blank=True, null=True, verbose_name=_('Private key')

@@ -705,7 +707,7 @@ class User(AuthMixin, TokenMixin, RoleMixin, MFAMixin, AbstractUser):
     comment = models.TextField(
         blank=True, null=True, verbose_name=_('Comment')
     )
-    is_first_login = models.BooleanField(default=True)
+    is_first_login = models.BooleanField(default=True, verbose_name=_('Is first login'))
     date_expired = models.DateTimeField(
         default=date_expired_default, blank=True, null=True,
         db_index=True, verbose_name=_('Date expired')

@@ -927,7 +929,7 @@ class UserPasswordHistory(models.Model):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
     password = models.CharField(max_length=128)
     user = models.ForeignKey("users.User", related_name='history_passwords',
-                             on_delete=models.CASCADE, verbose_name=_('User'))
+                             on_delete=jms_models.CASCADE_SIGNAL_SKIP, verbose_name=_('User'))
     date_created = models.DateTimeField(auto_now_add=True, verbose_name=_("Date created"))

     def __str__(self):