mirror of https://github.com/jumpserver/jumpserver
commit
2324cdc14e
|
@ -27,6 +27,7 @@ class AppType(models.TextChoices):
|
|||
sqlserver = 'sqlserver', 'SQLServer'
|
||||
redis = 'redis', 'Redis'
|
||||
mongodb = 'mongodb', 'MongoDB'
|
||||
clickhouse = 'clickhouse', 'ClickHouse'
|
||||
|
||||
# remote-app category
|
||||
chrome = 'chrome', 'Chrome'
|
||||
|
@ -42,7 +43,7 @@ class AppType(models.TextChoices):
|
|||
return {
|
||||
AppCategory.db: [
|
||||
cls.mysql, cls.mariadb, cls.oracle, cls.pgsql,
|
||||
cls.sqlserver, cls.redis, cls.mongodb
|
||||
cls.sqlserver, cls.redis, cls.mongodb, cls.clickhouse
|
||||
],
|
||||
AppCategory.remote_app: [
|
||||
cls.chrome, cls.mysql_workbench,
|
||||
|
@ -82,4 +83,4 @@ class AppType(models.TextChoices):
|
|||
|
||||
if AppCategory.is_xpack(category):
|
||||
return True
|
||||
return tp in ['oracle', 'postgresql', 'sqlserver']
|
||||
return tp in ['oracle', 'postgresql', 'sqlserver', 'clickhouse']
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('applications', '0023_auto_20220715_1556'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='application',
|
||||
name='type',
|
||||
field=models.CharField(choices=[('mysql', 'MySQL'), ('mariadb', 'MariaDB'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('chrome', 'Chrome'), ('mysql_workbench', 'MySQL Workbench'), ('vmware_client', 'vSphere Client'), ('custom', 'Custom'), ('k8s', 'Kubernetes')], max_length=16, verbose_name='Type'),
|
||||
),
|
||||
]
|
|
@ -6,6 +6,7 @@ from .pgsql import *
|
|||
from .sqlserver import *
|
||||
from .redis import *
|
||||
from .mongodb import *
|
||||
from .clickhouse import *
|
||||
|
||||
from .chrome import *
|
||||
from .mysql_workbench import *
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from ..application_category import DBSerializer
|
||||
|
||||
__all__ = ['ClickHouseSerializer']
|
||||
|
||||
|
||||
class ClickHouseSerializer(DBSerializer):
|
||||
port = serializers.IntegerField(default=9000, label=_('Port'), allow_null=True)
|
|
@ -31,6 +31,7 @@ type_serializer_classes_mapping = {
|
|||
const.AppType.sqlserver.value: application_type.SQLServerSerializer,
|
||||
const.AppType.redis.value: application_type.RedisSerializer,
|
||||
const.AppType.mongodb.value: application_type.MongoDBSerializer,
|
||||
const.AppType.clickhouse.value: application_type.ClickHouseSerializer,
|
||||
# cloud
|
||||
const.AppType.k8s.value: application_type.K8SSerializer
|
||||
}
|
||||
|
|
|
@ -21,8 +21,8 @@ class Migration(migrations.Migration):
|
|||
('name', models.CharField(max_length=64, verbose_name='Name')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('date_updated', models.DateTimeField(auto_now=True)),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('created_by', models.CharField(blank=True, default='', max_length=128, verbose_name='Created by')),
|
||||
],
|
||||
options={
|
||||
|
|
|
@ -20,7 +20,7 @@ class Migration(migrations.Migration):
|
|||
fields=[
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('is_periodic', models.BooleanField(default=False)),
|
||||
('is_periodic', models.BooleanField(default=False, verbose_name='Periodic perform')),
|
||||
('interval', models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform')),
|
||||
('crontab', models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform')),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0092_commandfilter_nodes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='protocol',
|
||||
field=models.CharField(choices=[('ssh', 'SSH'), ('rdp', 'RDP'), ('telnet', 'Telnet'), ('vnc', 'VNC'), ('mysql', 'MySQL'), ('oracle', 'Oracle'), ('mariadb', 'MariaDB'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('k8s', 'K8S')], default='ssh', max_length=16, verbose_name='Protocol'),
|
||||
),
|
||||
]
|
|
@ -50,8 +50,8 @@ class CommandFilter(OrgModelMixin):
|
|||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
|
||||
comment = models.TextField(blank=True, default='', verbose_name=_("Comment"))
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
date_updated = models.DateTimeField(auto_now=True)
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
|
||||
date_updated = models.DateTimeField(auto_now=True, verbose_name=_('Date updated'))
|
||||
created_by = models.CharField(
|
||||
max_length=128, blank=True, default='', verbose_name=_('Created by')
|
||||
)
|
||||
|
|
|
@ -34,6 +34,7 @@ class ProtocolMixin:
|
|||
sqlserver = 'sqlserver', 'SQLServer'
|
||||
redis = 'redis', 'Redis'
|
||||
mongodb = 'mongodb', 'MongoDB'
|
||||
clickhouse = 'clickhouse', 'ClickHouse'
|
||||
k8s = 'k8s', 'K8S'
|
||||
|
||||
SUPPORT_PUSH_PROTOCOLS = [Protocol.ssh, Protocol.rdp]
|
||||
|
@ -46,7 +47,7 @@ class ProtocolMixin:
|
|||
]
|
||||
APPLICATION_CATEGORY_DB_PROTOCOLS = [
|
||||
Protocol.mysql, Protocol.mariadb, Protocol.oracle,
|
||||
Protocol.postgresql, Protocol.sqlserver,
|
||||
Protocol.postgresql, Protocol.sqlserver, Protocol.clickhouse,
|
||||
Protocol.redis, Protocol.mongodb
|
||||
]
|
||||
APPLICATION_CATEGORY_CLOUD_PROTOCOLS = [
|
||||
|
|
|
@ -1,21 +1,29 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from rest_framework.mixins import ListModelMixin, CreateModelMixin
|
||||
from importlib import import_module
|
||||
|
||||
from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin
|
||||
from django.db.models import F, Value
|
||||
from django.db.models.functions import Concat
|
||||
from django.conf import settings
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework import generics
|
||||
|
||||
from common.drf.api import JMSReadOnlyModelViewSet
|
||||
from common.plugins.es import QuerySet as ESQuerySet
|
||||
from common.drf.filters import DatetimeRangeFilter
|
||||
from common.api import CommonGenericViewSet
|
||||
from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet, OrgRelationMixin
|
||||
from orgs.utils import current_org
|
||||
from ops.models import CommandExecution
|
||||
from . import filters
|
||||
from .backends import TYPE_ENGINE_MAPPING
|
||||
from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog
|
||||
from .serializers import FTPLogSerializer, UserLoginLogSerializer, CommandExecutionSerializer
|
||||
from .serializers import OperateLogSerializer, PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
|
||||
from .serializers import (
|
||||
OperateLogSerializer, OperateLogActionDetailSerializer,
|
||||
PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
|
||||
)
|
||||
|
||||
|
||||
class FTPLogViewSet(CreateModelMixin,
|
||||
|
@ -68,7 +76,7 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView):
|
|||
return qs
|
||||
|
||||
|
||||
class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet):
|
||||
model = OperateLog
|
||||
serializer_class = OperateLogSerializer
|
||||
extra_filter_backends = [DatetimeRangeFilter]
|
||||
|
@ -79,6 +87,22 @@ class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
|
|||
search_fields = ['resource']
|
||||
ordering = ['-datetime']
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.query_params.get('type') == 'action_detail':
|
||||
return OperateLogActionDetailSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def get_queryset(self):
|
||||
qs = OperateLog.objects.all()
|
||||
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
if es_config:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
|
||||
store = engine_mod.OperateLogStore(es_config)
|
||||
if store.ping(timeout=2):
|
||||
qs = ESQuerySet(store)
|
||||
qs.model = OperateLog
|
||||
return qs
|
||||
|
||||
|
||||
class PasswordChangeLogViewSet(ListModelMixin, CommonGenericViewSet):
|
||||
queryset = PasswordChangeLog.objects.all()
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
from importlib import import_module
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
TYPE_ENGINE_MAPPING = {
|
||||
'db': 'audits.backends.db',
|
||||
'es': 'audits.backends.es',
|
||||
}
|
||||
|
||||
|
||||
def get_operate_log_storage(default=False):
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['db'])
|
||||
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
if not default and es_config:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
|
||||
storage = engine_mod.OperateLogStore(es_config)
|
||||
return storage
|
|
@ -0,0 +1,38 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from audits.models import OperateLog
|
||||
|
||||
|
||||
class OperateLogStore(object):
|
||||
def __init__(self, config):
|
||||
self.model = OperateLog
|
||||
self.max_length = 1024
|
||||
self.max_length_tip_msg = _(
|
||||
'The text content is too long. Use Elasticsearch to store operation logs'
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def ping(timeout=None):
|
||||
return True
|
||||
|
||||
def save(self, **kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
before = kwargs.get('before') or {}
|
||||
after = kwargs.get('after') or {}
|
||||
if len(str(before)) > self.max_length:
|
||||
before = {_('Tips'): self.max_length_tip_msg}
|
||||
if len(str(after)) > self.max_length:
|
||||
after = {_('Tips'): self.max_length_tip_msg}
|
||||
|
||||
op_log = self.model.objects.filter(pk=log_id).first()
|
||||
if op_log is not None:
|
||||
raw_after = op_log.after or {}
|
||||
raw_before = op_log.before or {}
|
||||
raw_before.update(before)
|
||||
raw_after.update(after)
|
||||
op_log.before = raw_before
|
||||
op_log.after = raw_after
|
||||
op_log.save()
|
||||
else:
|
||||
self.model.objects.create(**kwargs)
|
|
@ -0,0 +1,85 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import uuid
|
||||
|
||||
from common.utils.timezone import local_now_display
|
||||
from common.utils import get_logger
|
||||
from common.utils.encode import Singleton
|
||||
from common.plugins.es import ES
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class OperateLogStore(ES, metaclass=Singleton):
|
||||
def __init__(self, config):
|
||||
properties = {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"user": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"action": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"resource_type": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"org_id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"datetime": {
|
||||
"type": "date",
|
||||
"format": "yyyy-MM-dd HH:mm:ss"
|
||||
}
|
||||
}
|
||||
exact_fields = {}
|
||||
match_fields = {
|
||||
'id', 'user', 'action', 'resource_type',
|
||||
'resource', 'remote_addr', 'org_id'
|
||||
}
|
||||
keyword_fields = {
|
||||
'id', 'user', 'action', 'resource_type', 'org_id'
|
||||
}
|
||||
if not config.get('INDEX'):
|
||||
config['INDEX'] = 'jumpserver_operate_log'
|
||||
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
|
||||
self.pre_use_check()
|
||||
|
||||
@staticmethod
|
||||
def make_data(data):
|
||||
op_id = data.get('id', str(uuid.uuid4()))
|
||||
datetime_param = data.get('datetime', local_now_display())
|
||||
data = {
|
||||
'id': op_id, 'user': data['user'], 'action': data['action'],
|
||||
'resource_type': data['resource_type'], 'resource': data['resource'],
|
||||
'remote_addr': data['remote_addr'], 'datetime': datetime_param,
|
||||
'before': data['before'], 'after': data['after'], 'org_id': data['org_id']
|
||||
}
|
||||
return data
|
||||
|
||||
def save(self, **kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
before = kwargs.get('before') or {}
|
||||
after = kwargs.get('after') or {}
|
||||
|
||||
op_log = self.get({'id': log_id})
|
||||
if op_log is not None:
|
||||
data = {'doc': {}}
|
||||
raw_after = op_log.get('after') or {}
|
||||
raw_before = op_log.get('before') or {}
|
||||
raw_before.update(before)
|
||||
raw_after.update(after)
|
||||
data['doc']['before'] = raw_before
|
||||
data['doc']['after'] = raw_after
|
||||
self.es.update(
|
||||
index=self.index, doc_type=self.doc_type,
|
||||
id=op_log.get('es_id'), body=data, refresh=True
|
||||
)
|
||||
else:
|
||||
data = self.make_data(kwargs)
|
||||
self.es.index(
|
||||
index=self.index, doc_type=self.doc_type, body=data,
|
||||
refresh=True
|
||||
)
|
|
@ -7,11 +7,13 @@ DEFAULT_CITY = _("Unknown")
|
|||
MODELS_NEED_RECORD = (
|
||||
# users
|
||||
'User', 'UserGroup',
|
||||
# authentication
|
||||
'AccessKey', 'TempToken',
|
||||
# acls
|
||||
'LoginACL', 'LoginAssetACL', 'LoginConfirmSetting',
|
||||
# assets
|
||||
'Asset', 'Node', 'AdminUser', 'SystemUser', 'Domain', 'Gateway', 'CommandFilterRule',
|
||||
'CommandFilter', 'Platform', 'AuthBook',
|
||||
'CommandFilter', 'Platform', 'Label',
|
||||
# applications
|
||||
'Application',
|
||||
# orgs
|
||||
|
@ -20,6 +22,13 @@ MODELS_NEED_RECORD = (
|
|||
'Setting',
|
||||
# perms
|
||||
'AssetPermission', 'ApplicationPermission',
|
||||
# notifications
|
||||
'SystemMsgSubscription', 'UserMsgSubscription',
|
||||
# Terminal
|
||||
'Terminal', 'Endpoint', 'EndpointRule', 'CommandStorage', 'ReplayStorage',
|
||||
# rbac
|
||||
'Role', 'SystemRole', 'OrgRole', 'RoleBinding', 'OrgRoleBinding', 'SystemRoleBinding',
|
||||
# xpack
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'GatherUserTask',
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'ApplicationChangeAuthPlan',
|
||||
'GatherUserTask', 'Interface',
|
||||
)
|
||||
|
|
|
@ -0,0 +1,183 @@
|
|||
from datetime import datetime
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_request_ip, get_logger
|
||||
from common.utils.timezone import as_current_tz
|
||||
from common.utils.encode import Singleton
|
||||
from common.local import encrypted_field_set
|
||||
from settings.serializers import SettingsSerializer
|
||||
from jumpserver.utils import current_request
|
||||
from audits.models import OperateLog
|
||||
from orgs.utils import get_current_org_id
|
||||
|
||||
from .backends import get_operate_log_storage
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ModelClient:
|
||||
@staticmethod
|
||||
def save(**kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
op_log = OperateLog.objects.filter(pk=log_id).first()
|
||||
if op_log is not None:
|
||||
raw_after = op_log.after or {}
|
||||
raw_before = op_log.before or {}
|
||||
cur_before = kwargs.get('before') or {}
|
||||
cur_after = kwargs.get('after') or {}
|
||||
raw_before.update(cur_before)
|
||||
raw_after.update(cur_after)
|
||||
op_log.before = raw_before
|
||||
op_log.after = raw_after
|
||||
op_log.save()
|
||||
else:
|
||||
OperateLog.objects.create(**kwargs)
|
||||
|
||||
|
||||
class OperatorLogHandler(metaclass=Singleton):
|
||||
CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
|
||||
|
||||
def __init__(self):
|
||||
self.log_client = self.get_storage_client()
|
||||
|
||||
@staticmethod
|
||||
def get_storage_client():
|
||||
client = get_operate_log_storage()
|
||||
return client
|
||||
|
||||
@staticmethod
|
||||
def _consistent_type_to_str(value1, value2):
|
||||
if isinstance(value1, datetime):
|
||||
value1 = as_current_tz(value1).strftime('%Y-%m-%d %H:%M:%S')
|
||||
if isinstance(value2, datetime):
|
||||
value2 = as_current_tz(value2).strftime('%Y-%m-%d %H:%M:%S')
|
||||
return value1, value2
|
||||
|
||||
def _look_for_two_dict_change(self, left_dict, right_dict):
|
||||
# 以右边的字典为基础
|
||||
before, after = {}, {}
|
||||
for key, value in right_dict.items():
|
||||
pre_value = left_dict.get(key, '')
|
||||
pre_value, value = self._consistent_type_to_str(pre_value, value)
|
||||
if sorted(str(value)) == sorted(str(pre_value)):
|
||||
continue
|
||||
if pre_value:
|
||||
before[key] = pre_value
|
||||
if value:
|
||||
after[key] = value
|
||||
return before, after
|
||||
|
||||
def cache_instance_before_data(self, instance_dict):
|
||||
instance_id = instance_dict.get('id')
|
||||
if instance_id is None:
|
||||
return
|
||||
|
||||
key = '%s_%s' % (self.CACHE_KEY, instance_id)
|
||||
cache.set(key, instance_dict, 3 * 60)
|
||||
|
||||
def get_instance_dict_from_cache(self, instance_id):
|
||||
if instance_id is None:
|
||||
return None
|
||||
|
||||
key = '%s_%s' % (self.CACHE_KEY, instance_id)
|
||||
cache_instance = cache.get(key, {})
|
||||
log_id = cache_instance.get('operate_log_id')
|
||||
return log_id, cache_instance
|
||||
|
||||
def get_instance_current_with_cache_diff(self, current_instance):
|
||||
log_id, before, after = None, None, None
|
||||
instance_id = current_instance.get('id')
|
||||
if instance_id is None:
|
||||
return log_id, before, after
|
||||
|
||||
log_id, cache_instance = self.get_instance_dict_from_cache(instance_id)
|
||||
if not cache_instance:
|
||||
return log_id, before, after
|
||||
|
||||
before, after = self._look_for_two_dict_change(
|
||||
cache_instance, current_instance
|
||||
)
|
||||
return log_id, before, after
|
||||
|
||||
@staticmethod
|
||||
def get_resource_display_from_setting(resource):
|
||||
resource_display = None
|
||||
setting_serializer = SettingsSerializer()
|
||||
label = setting_serializer.get_field_label(resource)
|
||||
if label is not None:
|
||||
resource_display = label
|
||||
return resource_display
|
||||
|
||||
def get_resource_display(self, resource):
|
||||
resource_display = str(resource)
|
||||
return_value = self.get_resource_display_from_setting(resource_display)
|
||||
if return_value is not None:
|
||||
resource_display = return_value
|
||||
return resource_display
|
||||
|
||||
def __data_processing(self, dict_item, loop=True):
|
||||
encrypt_value = '******'
|
||||
for key, value in dict_item.items():
|
||||
if isinstance(value, bool):
|
||||
value = _('Yes') if value else _('No')
|
||||
elif isinstance(value, (list, tuple)):
|
||||
value = ','.join(value)
|
||||
elif isinstance(value, dict) and loop:
|
||||
self.__data_processing(value, loop=False)
|
||||
if key in encrypted_field_set:
|
||||
value = encrypt_value
|
||||
dict_item[key] = value
|
||||
return dict_item
|
||||
|
||||
def data_processing(self, before, after):
|
||||
if before:
|
||||
before = self.__data_processing(before)
|
||||
if after:
|
||||
after = self.__data_processing(after)
|
||||
return before, after
|
||||
|
||||
def create_or_update_operate_log(
|
||||
self, action, resource_type, resource=None,
|
||||
force=False, log_id=None, before=None, after=None
|
||||
):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
return
|
||||
|
||||
remote_addr = get_request_ip(current_request)
|
||||
resource_display = self.get_resource_display(resource)
|
||||
before, after = self.data_processing(before, after)
|
||||
if not force and not any([before, after]):
|
||||
# 前后都没变化,没必要生成日志,除非手动强制保存
|
||||
return
|
||||
|
||||
data = {
|
||||
'id': log_id, "user": str(user), 'action': action,
|
||||
'resource_type': str(resource_type), 'resource': resource_display,
|
||||
'remote_addr': remote_addr, 'before': before, 'after': after,
|
||||
'org_id': get_current_org_id(),
|
||||
}
|
||||
with transaction.atomic():
|
||||
if self.log_client.ping(timeout=1):
|
||||
client = self.log_client
|
||||
else:
|
||||
logger.info('Switch default operate log storage save.')
|
||||
client = get_operate_log_storage(default=True)
|
||||
|
||||
try:
|
||||
client.save(**data)
|
||||
except Exception as e:
|
||||
error_msg = 'An error occurred saving OperateLog.' \
|
||||
'Error: %s, Data: %s' % (e, data)
|
||||
logger.error(error_msg)
|
||||
|
||||
|
||||
op_handler = OperatorLogHandler()
|
||||
create_or_update_operate_log = op_handler.create_or_update_operate_log
|
||||
cache_instance_before_data = op_handler.cache_instance_before_data
|
||||
get_instance_current_with_cache_diff = op_handler.get_instance_current_with_cache_diff
|
||||
get_instance_dict_from_cache = op_handler.get_instance_dict_from_cache
|
|
@ -0,0 +1,24 @@
|
|||
# Generated by Django 3.2.14 on 2022-10-11 09:45
|
||||
|
||||
import common.db.encoder
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0014_auto_20220505_1902'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='operatelog',
|
||||
name='after',
|
||||
field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='operatelog',
|
||||
name='before',
|
||||
field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
|
||||
),
|
||||
]
|
|
@ -4,8 +4,9 @@ from django.db import models
|
|||
from django.db.models import Q
|
||||
from django.utils.translation import gettext, ugettext_lazy as _
|
||||
from django.utils import timezone
|
||||
from common.utils import lazyproperty
|
||||
|
||||
from common.utils import lazyproperty
|
||||
from common.db.encoder import ModelJSONFieldEncoder
|
||||
from orgs.mixins.models import OrgModelMixin, Organization
|
||||
from orgs.utils import current_org
|
||||
|
||||
|
@ -65,6 +66,8 @@ class OperateLog(OrgModelMixin):
|
|||
resource = models.CharField(max_length=128, verbose_name=_("Resource"))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
|
||||
before = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
|
||||
after = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return "<{}> {} <{}>".format(self.user, self.action, self.resource)
|
||||
|
@ -78,6 +81,21 @@ class OperateLog(OrgModelMixin):
|
|||
self.org_id = Organization.ROOT_ID
|
||||
return super(OperateLog, self).save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
self = cls()
|
||||
for k, v in d.items():
|
||||
setattr(self, k, v)
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def from_multi_dict(cls, l):
|
||||
operate_logs = []
|
||||
for d in l:
|
||||
operate_log = cls.from_dict(d)
|
||||
operate_logs.append(operate_log)
|
||||
return operate_logs
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Operate log")
|
||||
|
||||
|
|
|
@ -47,6 +47,12 @@ class UserLoginLogSerializer(serializers.ModelSerializer):
|
|||
}
|
||||
|
||||
|
||||
class OperateLogActionDetailSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.OperateLog
|
||||
fields = ('before', 'after')
|
||||
|
||||
|
||||
class OperateLogSerializer(serializers.ModelSerializer):
|
||||
action_display = serializers.CharField(source='get_action_display', label=_('Action'))
|
||||
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from django.db.models.signals import (
|
||||
post_save, m2m_changed, pre_delete
|
||||
post_save, m2m_changed, pre_delete, pre_save
|
||||
)
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
|
@ -16,24 +16,32 @@ from django.utils import translation
|
|||
from rest_framework.renderers import JSONRenderer
|
||||
from rest_framework.request import Request
|
||||
|
||||
from assets.models import Asset, SystemUser
|
||||
from users.models import User
|
||||
from assets.models import Asset, SystemUser, CommandFilter
|
||||
from terminal.models import Session, Command
|
||||
from perms.models import AssetPermission, ApplicationPermission
|
||||
from rbac.models import Role
|
||||
|
||||
from audits.utils import model_to_dict_for_operate_log as model_to_dict
|
||||
from audits.handler import (
|
||||
get_instance_current_with_cache_diff, cache_instance_before_data,
|
||||
create_or_update_operate_log, get_instance_dict_from_cache
|
||||
)
|
||||
from authentication.signals import post_auth_failed, post_auth_success
|
||||
from authentication.utils import check_different_city_login_if_need
|
||||
from jumpserver.utils import current_request
|
||||
from users.models import User
|
||||
from users.signals import post_user_change_password
|
||||
from terminal.models import Session, Command
|
||||
from .utils import write_login_log, create_operate_log
|
||||
from .utils import write_login_log
|
||||
from . import models, serializers
|
||||
from .models import OperateLog
|
||||
from orgs.utils import current_org
|
||||
from perms.models import AssetPermission, ApplicationPermission
|
||||
from .const import MODELS_NEED_RECORD
|
||||
from terminal.backends.command.serializers import SessionCommandSerializer
|
||||
from terminal.serializers import SessionSerializer
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL
|
||||
from common.utils import get_request_ip, get_logger, get_syslogger
|
||||
from common.utils.encode import data_to_json
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
sys_logger = get_syslogger(__name__)
|
||||
json_render = JSONRenderer()
|
||||
|
@ -62,70 +70,6 @@ class AuthBackendLabelMapping(LazyObject):
|
|||
|
||||
AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()
|
||||
|
||||
|
||||
M2M_NEED_RECORD = {
|
||||
User.groups.through._meta.object_name: (
|
||||
_('User and Group'),
|
||||
_('{User} JOINED {UserGroup}'),
|
||||
_('{User} LEFT {UserGroup}')
|
||||
),
|
||||
SystemUser.assets.through._meta.object_name: (
|
||||
_('Asset and SystemUser'),
|
||||
_('{Asset} ADD {SystemUser}'),
|
||||
_('{Asset} REMOVE {SystemUser}')
|
||||
),
|
||||
Asset.nodes.through._meta.object_name: (
|
||||
_('Node and Asset'),
|
||||
_('{Node} ADD {Asset}'),
|
||||
_('{Node} REMOVE {Asset}')
|
||||
),
|
||||
AssetPermission.users.through._meta.object_name: (
|
||||
_('User asset permissions'),
|
||||
_('{AssetPermission} ADD {User}'),
|
||||
_('{AssetPermission} REMOVE {User}'),
|
||||
),
|
||||
AssetPermission.user_groups.through._meta.object_name: (
|
||||
_('User group asset permissions'),
|
||||
_('{AssetPermission} ADD {UserGroup}'),
|
||||
_('{AssetPermission} REMOVE {UserGroup}'),
|
||||
),
|
||||
AssetPermission.assets.through._meta.object_name: (
|
||||
_('Asset permission'),
|
||||
_('{AssetPermission} ADD {Asset}'),
|
||||
_('{AssetPermission} REMOVE {Asset}'),
|
||||
),
|
||||
AssetPermission.nodes.through._meta.object_name: (
|
||||
_('Node permission'),
|
||||
_('{AssetPermission} ADD {Node}'),
|
||||
_('{AssetPermission} REMOVE {Node}'),
|
||||
),
|
||||
AssetPermission.system_users.through._meta.object_name: (
|
||||
_('Asset permission and SystemUser'),
|
||||
_('{AssetPermission} ADD {SystemUser}'),
|
||||
_('{AssetPermission} REMOVE {SystemUser}'),
|
||||
),
|
||||
ApplicationPermission.users.through._meta.object_name: (
|
||||
_('User application permissions'),
|
||||
_('{ApplicationPermission} ADD {User}'),
|
||||
_('{ApplicationPermission} REMOVE {User}'),
|
||||
),
|
||||
ApplicationPermission.user_groups.through._meta.object_name: (
|
||||
_('User group application permissions'),
|
||||
_('{ApplicationPermission} ADD {UserGroup}'),
|
||||
_('{ApplicationPermission} REMOVE {UserGroup}'),
|
||||
),
|
||||
ApplicationPermission.applications.through._meta.object_name: (
|
||||
_('Application permission'),
|
||||
_('{ApplicationPermission} ADD {Application}'),
|
||||
_('{ApplicationPermission} REMOVE {Application}'),
|
||||
),
|
||||
ApplicationPermission.system_users.through._meta.object_name: (
|
||||
_('Application permission and SystemUser'),
|
||||
_('{ApplicationPermission} ADD {SystemUser}'),
|
||||
_('{ApplicationPermission} REMOVE {SystemUser}'),
|
||||
),
|
||||
}
|
||||
|
||||
M2M_ACTION = {
|
||||
POST_ADD: OperateLog.ACTION_CREATE,
|
||||
POST_REMOVE: OperateLog.ACTION_DELETE,
|
||||
|
@ -137,60 +81,115 @@ M2M_ACTION = {
|
|||
def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
|
||||
if action not in M2M_ACTION:
|
||||
return
|
||||
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
if not instance:
|
||||
return
|
||||
|
||||
sender_name = sender._meta.object_name
|
||||
if sender_name in M2M_NEED_RECORD:
|
||||
org_id = current_org.id
|
||||
remote_addr = get_request_ip(current_request)
|
||||
user = str(user)
|
||||
resource_type, resource_tmpl_add, resource_tmpl_remove = M2M_NEED_RECORD[sender_name]
|
||||
action = M2M_ACTION[action]
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
resource_tmpl = resource_tmpl_add
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
resource_tmpl = resource_tmpl_remove
|
||||
resource_type = instance._meta.verbose_name
|
||||
current_instance = model_to_dict(instance, include_model_fields=False)
|
||||
|
||||
to_create = []
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
instance_id = current_instance.get('id')
|
||||
log_id, before_instance = get_instance_dict_from_cache(instance_id)
|
||||
|
||||
instance_name = instance._meta.object_name
|
||||
instance_value = str(instance)
|
||||
field_name = str(model._meta.verbose_name)
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
objs_display = [str(o) for o in objs]
|
||||
action = M2M_ACTION[action]
|
||||
changed_field = current_instance.get(field_name, [])
|
||||
|
||||
model_name = model._meta.object_name
|
||||
after, before, before_value = None, None, None
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
before_value = list(set(changed_field) - set(objs_display))
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
before_value = list(
|
||||
set(changed_field).symmetric_difference(set(objs_display))
|
||||
)
|
||||
|
||||
for obj in objs:
|
||||
resource = resource_tmpl.format(**{
|
||||
instance_name: instance_value,
|
||||
model_name: str(obj)
|
||||
})[:128] # `resource` 字段只有 128 个字符长 😔
|
||||
if changed_field:
|
||||
after = {field_name: changed_field}
|
||||
if before_value:
|
||||
before = {field_name: before_value}
|
||||
|
||||
to_create.append(OperateLog(
|
||||
user=user, action=action, resource_type=resource_type,
|
||||
resource=resource, remote_addr=remote_addr, org_id=org_id
|
||||
))
|
||||
OperateLog.objects.bulk_create(to_create)
|
||||
if sorted(str(before)) == sorted(str(after)):
|
||||
return
|
||||
|
||||
create_or_update_operate_log(
|
||||
OperateLog.ACTION_UPDATE, resource_type,
|
||||
resource=instance, log_id=log_id, before=before, after=after
|
||||
)
|
||||
|
||||
|
||||
def signal_of_operate_log_whether_continue(sender, instance, created, update_fields=None):
    """Decide whether an operate log should be recorded for this signal.

    Returns False (skip logging) when:
    - there is no instance, or the instance is flagged to skip signals;
    - a Terminal is being created (system generated, not a user action);
    - only User.last_login changed (it changes on every login);
    - the sender model is not in the record whitelist.

    Note: the original evaluated ``instance._meta`` even after seeing that
    ``instance`` was falsy, raising AttributeError for a None instance;
    early returns fix that.
    """
    if not instance:
        return False
    # Explicitly flagged to skip (e.g. cascade deletes tag their children).
    if getattr(instance, SKIP_SIGNAL, False):
        return False
    # Terminal create events are produced by the system; do not record.
    if instance._meta.object_name == 'Terminal' and created:
        return False
    # last_login is touched on every login; not a meaningful update.
    if (instance._meta.object_name == 'User'
            and update_fields and 'last_login' in update_fields):
        return False
    # Not in the record whitelist: skip.
    if sender._meta.object_name not in MODELS_NEED_RECORD:
        return False
    return True
|
||||
|
||||
|
||||
@receiver(pre_save)
def on_object_pre_create_or_update(sender, instance=None, raw=False, using=None, update_fields=None, **kwargs):
    """Snapshot the instance's pre-save state into the cache.

    Tags both the snapshot and the in-memory instance with a fresh
    operate_log_id so the post_save handler can pair them up and diff.
    """
    if not signal_of_operate_log_whether_continue(sender, instance, False, update_fields):
        return

    # The row currently persisted in the DB (None on create) supplies
    # the "before" image; on create only the id is known.
    persisted = type(instance).objects.filter(pk=instance.id).first()
    before_data = model_to_dict(persisted) if persisted else {'id': instance.id}

    log_id = str(uuid.uuid4())
    before_data['operate_log_id'] = log_id
    setattr(instance, 'operate_log_id', log_id)
    cache_instance_before_data(before_data)
|
||||
|
||||
|
||||
@receiver(post_save)
def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
    """Record an operate log after a model instance is created or updated.

    On create, the whole instance dict is the "after" image.
    On update, the diff against the cached pre-save snapshot becomes
    before/after (see on_object_pre_create_or_update).

    Fixes over the previous version: the skip decision now always binds
    ``ok`` (it was computed inside a dead ``User``/``last_login`` guard,
    leaving it possibly unbound), and the stale duplicate
    ``create_operate_log`` call in the update branch is removed — the
    last_login / whitelist checks live in
    signal_of_operate_log_whether_continue.
    """
    ok = signal_of_operate_log_whether_continue(
        sender, instance, created, update_fields
    )
    if not ok:
        return

    log_id, before, after = None, None, None
    if created:
        action = models.OperateLog.ACTION_CREATE
        after = model_to_dict(instance)
        log_id = getattr(instance, 'operate_log_id', None)
    else:
        action = models.OperateLog.ACTION_UPDATE
        current_instance = model_to_dict(instance)
        log_id, before, after = get_instance_current_with_cache_diff(current_instance)

    resource_type = sender._meta.verbose_name
    create_or_update_operate_log(
        action, resource_type, resource=instance,
        log_id=log_id, before=before, after=after
    )
|
||||
|
||||
|
||||
@receiver(pre_delete)
def on_object_delete(sender, instance=None, **kwargs):
    """Record an operate log before a model instance is deleted.

    Fix: the stale unconditional ``create_operate_log`` call that ran
    before the skip-guard is removed — it double-logged every delete and
    ignored the guard (including the SKIP_SIGNAL flag set by
    CASCADE_SIGNAL_SKIP for cascade-deleted children).
    """
    ok = signal_of_operate_log_whether_continue(sender, instance, False)
    if not ok:
        return

    resource_type = sender._meta.verbose_name
    create_or_update_operate_log(
        models.OperateLog.ACTION_DELETE, resource_type,
        resource=instance, before=model_to_dict(instance)
    )
|
||||
|
||||
|
||||
@receiver(post_user_change_password, sender=User)
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
import csv
|
||||
import codecs
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.db import transaction
|
||||
from django.utils import translation
|
||||
from itertools import chain
|
||||
|
||||
from audits.models import OperateLog
|
||||
from common.utils import validate_ip, get_ip_city, get_request_ip, get_logger
|
||||
from jumpserver.utils import current_request
|
||||
from .const import DEFAULT_CITY, MODELS_NEED_RECORD
|
||||
from django.http import HttpResponse
|
||||
from django.db import models
|
||||
|
||||
from settings.serializers import SettingsSerializer
|
||||
from common.utils import validate_ip, get_ip_city, get_logger
|
||||
from common.db import fields
|
||||
from .const import DEFAULT_CITY
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
@ -46,23 +47,60 @@ def write_login_log(*args, **kwargs):
|
|||
UserLoginLog.objects.create(**kwargs)
|
||||
|
||||
|
||||
def create_operate_log(action, sender, resource):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
return
|
||||
model_name = sender._meta.object_name
|
||||
if model_name not in MODELS_NEED_RECORD:
|
||||
return
|
||||
with translation.override('en'):
|
||||
resource_type = sender._meta.verbose_name
|
||||
remote_addr = get_request_ip(current_request)
|
||||
def get_resource_display(resource):
|
||||
resource_display = str(resource)
|
||||
setting_serializer = SettingsSerializer()
|
||||
label = setting_serializer.get_field_label(resource_display)
|
||||
if label is not None:
|
||||
resource_display = label
|
||||
return resource_display
|
||||
|
||||
data = {
|
||||
"user": str(user), 'action': action, 'resource_type': resource_type,
|
||||
'resource': str(resource), 'remote_addr': remote_addr,
|
||||
}
|
||||
with transaction.atomic():
|
||||
try:
|
||||
OperateLog.objects.create(**data)
|
||||
except Exception as e:
|
||||
logger.error("Create operate log error: {}".format(e))
|
||||
|
||||
def model_to_dict_for_operate_log(
        instance, include_model_fields=True, include_related_fields=True
):
    """Serialize a model instance into a {verbose_name: value} dict for logs.

    - Skips file/image fields, the noisy ``date_updated`` field, and empty
      non-boolean values.
    - Masks encrypted/password values as ``encrypt|<value>``.
    - Optionally appends many-to-many / reverse relations as lists of str.

    Fix: the bare ``except:`` clauses are narrowed to ``except Exception``
    so SystemExit/KeyboardInterrupt are no longer swallowed; the
    best-effort semantics are otherwise preserved.
    """
    need_continue_fields = ['date_updated']
    opts = instance._meta
    data = {}
    for f in chain(opts.concrete_fields, opts.private_fields):
        if isinstance(f, (models.FileField, models.ImageField)):
            continue

        if getattr(f, 'attname', None) in need_continue_fields:
            continue

        value = getattr(instance, f.name) or getattr(instance, f.attname)
        # Keep booleans (False is meaningful); drop other falsy values.
        if not isinstance(value, bool) and not value:
            continue

        if getattr(f, 'primary_key', False):
            f.verbose_name = 'id'
        elif isinstance(f, (
            fields.EncryptCharField, fields.EncryptTextField,
            fields.EncryptJsonDictCharField, fields.EncryptJsonDictTextField
        )) or getattr(f, 'attname', '') == 'password':
            # Mask secret values so they are not logged in the clear.
            value = 'encrypt|%s' % value
        elif isinstance(value, list):
            value = [str(v) for v in value]

        if include_model_fields or getattr(f, 'primary_key', False):
            data[str(f.verbose_name)] = value

    if include_related_fields:
        for f in chain(opts.many_to_many, opts.related_objects):
            value = []
            if instance.pk is not None:
                related_name = getattr(f, 'attname', '') or getattr(f, 'related_name', '')
                if related_name:
                    try:
                        value = [str(i) for i in getattr(instance, related_name).all()]
                    except Exception:
                        # Best effort: unreadable relations are simply omitted.
                        pass
            if not value:
                continue
            try:
                field_key = getattr(f, 'verbose_name', None) or f.related_model._meta.verbose_name
                data[str(field_key)] = value
            except Exception:
                pass
    return data
|
||||
|
|
|
@ -88,7 +88,7 @@ class ConnectionTokenMixin:
|
|||
filename, ssh_token = self.get_ssh_token(token)
|
||||
else:
|
||||
raise ValueError('Protocol not support: {}'.format(protocol))
|
||||
|
||||
filename = urllib.parse.unquote(filename)
|
||||
return {
|
||||
"filename": filename,
|
||||
"protocol": protocol,
|
||||
|
|
|
@ -1,13 +1,73 @@
|
|||
from rest_framework.generics import CreateAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import AllowAny
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.template.loader import render_to_string
|
||||
|
||||
from authentication.serializers import PasswordVerifySerializer
|
||||
from common.utils.verify_code import SendAndVerifyCodeUtil
|
||||
from common.permissions import IsValidUser
|
||||
from common.utils.random import random_string
|
||||
from common.utils import get_object_or_none
|
||||
from authentication.serializers import (
|
||||
PasswordVerifySerializer, ResetPasswordCodeSerializer
|
||||
)
|
||||
from settings.utils import get_login_title
|
||||
from users.models import User
|
||||
from authentication.mixins import authenticate
|
||||
from authentication.errors import PasswordInvalid
|
||||
from authentication.mixins import AuthMixin
|
||||
|
||||
|
||||
class UserResetPasswordSendCodeApi(CreateAPIView):
    """Send a reset-password verification code to a user by email or SMS."""
    permission_classes = (AllowAny,)
    serializer_class = ResetPasswordCodeSerializer

    @staticmethod
    def is_valid_user(**kwargs):
        """Look up a local user matching *kwargs*.

        Returns (user, None) on success, or (None, error_message) when no
        user matches or the user is managed by an external auth source.
        """
        user = get_object_or_none(User, **kwargs)
        if not user:
            err_msg = _('User does not exist: {}').format(_("No user matched"))
            return None, err_msg
        if not user.is_local:
            err_msg = _(
                'The user is from {}, please go to the corresponding system to change the password'
            ).format(user.get_source_display())
            return None, err_msg
        return user, None

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        form_type = serializer.validated_data['form_type']
        username = serializer.validated_data['username']
        code = random_string(6, lower=False, upper=False)
        other_args = {}

        # Pick the delivery channel and the matching lookup field; the
        # error branch was previously duplicated in both arms.
        if form_type == 'phone':
            backend = 'sms'
            target = serializer.validated_data['phone']
            user, err = self.is_valid_user(username=username, phone=target)
        else:
            backend = 'email'
            target = serializer.validated_data['email']
            user, err = self.is_valid_user(username=username, email=target)
        if not user:
            return Response({'error': err}, status=400)

        subject = '%s: %s' % (get_login_title(), _('Forgot password'))
        context = {
            'user': user, 'title': subject, 'code': code,
        }
        message = render_to_string('authentication/_msg_reset_password_code.html', context)
        other_args['subject'] = subject
        other_args['message'] = message

        SendAndVerifyCodeUtil(target, code, backend=backend, **other_args).gen_and_send_async()
        return Response({'data': 'ok'}, status=200)
|
||||
|
||||
|
||||
class UserPasswordVerifyApi(AuthMixin, CreateAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
serializer_class = PasswordVerifySerializer
|
||||
|
|
|
@ -2,7 +2,7 @@ from django.db.models import TextChoices
|
|||
|
||||
from authentication.confirm import CONFIRM_BACKENDS
|
||||
from .confirm import ConfirmMFA, ConfirmPassword, ConfirmReLogin
|
||||
from .mfa import MFAOtp, MFASms, MFARadius
|
||||
from .mfa import MFAOtp, MFASms, MFARadius, MFACustom
|
||||
|
||||
RSA_PRIVATE_KEY = 'rsa_private_key'
|
||||
RSA_PUBLIC_KEY = 'rsa_public_key'
|
||||
|
@ -35,3 +35,4 @@ class MFAType(TextChoices):
|
|||
OTP = MFAOtp.name, MFAOtp.display_name
|
||||
SMS = MFASms.name, MFASms.display_name
|
||||
Radius = MFARadius.name, MFARadius.display_name
|
||||
Custom = MFACustom.name, MFACustom.display_name
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
from .otp import MFAOtp, otp_failed_msg
|
||||
from .sms import MFASms
|
||||
from .radius import MFARadius
|
||||
|
||||
MFA_BACKENDS = [MFAOtp, MFASms, MFARadius]
|
||||
from .custom import MFACustom
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
from django.conf import settings
|
||||
from django.utils.module_loading import import_string
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from .base import BaseMFA
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
mfa_custom_method = None
|
||||
|
||||
if settings.MFA_CUSTOM:
|
||||
""" 保证自定义认证方法在服务运行时不能被更改,只在第一次调用时加载一次 """
|
||||
try:
|
||||
mfa_custom_method_path = 'data.mfa.main.check_code'
|
||||
mfa_custom_method = import_string(mfa_custom_method_path)
|
||||
except Exception as e:
|
||||
logger.warning('Import custom auth method failed: {}, Maybe not enabled'.format(e))
|
||||
|
||||
custom_failed_msg = _("MFA Custom code invalid")
|
||||
|
||||
|
||||
class MFACustom(BaseMFA):
    """MFA backend that delegates code verification to a site-provided hook.

    The hook (``mfa_custom_method``) is loaded once at import time; this
    backend has no per-user enable/disable flow — it is controlled globally
    via ``settings.MFA_CUSTOM``.
    """
    name = 'mfa_custom'
    display_name = 'Custom'
    placeholder = _("MFA custom verification code")

    def check_code(self, code):
        """Verify *code* via the custom hook; return (ok, error_message)."""
        assert self.is_authenticated()
        ok = False
        try:
            ok = mfa_custom_method(user=self.user, code=code)
        except Exception as exc:
            logger.error('Custom authenticate error: {}'.format(exc))
        return ok, ('' if ok else custom_failed_msg)

    def is_active(self):
        # Always considered active; enablement is decided globally.
        return True

    @staticmethod
    def global_enabled():
        return settings.MFA_CUSTOM and callable(mfa_custom_method)

    def get_enable_url(self) -> str:
        # No per-user enable flow exists.
        return ''

    def can_disable(self):
        return False

    def disable(self):
        return ''

    @staticmethod
    def help_text_of_disable():
        return _("MFA custom global enabled, cannot disable")

    def get_disable_url(self) -> str:
        # No per-user disable flow exists.
        return ''
|
|
@ -2,7 +2,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
from django.conf import settings
|
||||
|
||||
from .base import BaseMFA
|
||||
from common.sdk.sms import SendAndVerifySMSUtil
|
||||
from common.utils.verify_code import SendAndVerifyCodeUtil
|
||||
|
||||
sms_failed_msg = _("SMS verify code invalid")
|
||||
|
||||
|
@ -15,7 +15,7 @@ class MFASms(BaseMFA):
|
|||
def __init__(self, user):
|
||||
super().__init__(user)
|
||||
phone = user.phone if self.is_authenticated() else ''
|
||||
self.sms = SendAndVerifySMSUtil(phone)
|
||||
self.sms = SendAndVerifyCodeUtil(phone, backend=self.name)
|
||||
|
||||
def check_code(self, code):
|
||||
assert self.is_authenticated()
|
||||
|
@ -37,7 +37,7 @@ class MFASms(BaseMFA):
|
|||
return True
|
||||
|
||||
def send_challenge(self):
|
||||
self.sms.gen_and_send()
|
||||
self.sms.gen_and_send_async()
|
||||
|
||||
@staticmethod
|
||||
def global_enabled():
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
# Generated by Django 2.1.7 on 2019-02-28 08:07
|
||||
|
||||
import common.db.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
@ -27,7 +28,7 @@ class Migration(migrations.Migration):
|
|||
models.UUIDField(default=uuid.uuid4, editable=False,
|
||||
verbose_name='AccessKeySecret')),
|
||||
('user', models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
on_delete=common.db.models.CASCADE_SIGNAL_SKIP,
|
||||
related_name='access_keys',
|
||||
to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
|
|
|
@ -15,7 +15,7 @@ class Migration(migrations.Migration):
|
|||
migrations.AddField(
|
||||
model_name='accesskey',
|
||||
name='date_created',
|
||||
field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2019, 7, 29, 6, 23, 54, 115123, tzinfo=utc)),
|
||||
field=models.DateTimeField(auto_now_add=True, default=datetime.datetime(2019, 7, 29, 6, 23, 54, 115123, tzinfo=utc), verbose_name='Date created'),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Generated by Django 3.1.13 on 2021-12-27 02:59
|
||||
|
||||
import common.db.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
@ -16,6 +16,6 @@ class Migration(migrations.Migration):
|
|||
migrations.AlterField(
|
||||
model_name='ssotoken',
|
||||
name='user',
|
||||
field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='User'),
|
||||
field=models.ForeignKey(db_constraint=False, on_delete=common.db.models.CASCADE_SIGNAL_SKIP, to=settings.AUTH_USER_MODEL, verbose_name='User'),
|
||||
),
|
||||
]
|
||||
|
|
|
@ -16,10 +16,10 @@ class AccessKey(models.Model):
|
|||
default=uuid.uuid4, editable=False)
|
||||
secret = models.UUIDField(verbose_name='AccessKeySecret',
|
||||
default=uuid.uuid4, editable=False)
|
||||
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='User',
|
||||
on_delete=models.CASCADE, related_name='access_keys')
|
||||
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('User'),
|
||||
on_delete=models.CASCADE_SIGNAL_SKIP, related_name='access_keys')
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
|
||||
|
||||
def get_id(self):
|
||||
return str(self.id)
|
||||
|
@ -51,7 +51,7 @@ class SSOToken(models.JMSBaseModel):
|
|||
"""
|
||||
authkey = models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name=_('Token'))
|
||||
expired = models.BooleanField(default=False, verbose_name=_('Expired'))
|
||||
user = models.ForeignKey('users.User', on_delete=models.CASCADE, verbose_name=_('User'), db_constraint=False)
|
||||
user = models.ForeignKey('users.User', on_delete=models.CASCADE_SIGNAL_SKIP, verbose_name=_('User'), db_constraint=False)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('SSO token')
|
||||
|
|
|
@ -1,15 +1,41 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
|
||||
__all__ = [
|
||||
'MFAChallengeSerializer', 'MFASelectTypeSerializer',
|
||||
'PasswordVerifySerializer',
|
||||
'PasswordVerifySerializer', 'ResetPasswordCodeSerializer',
|
||||
]
|
||||
|
||||
|
||||
class ResetPasswordCodeSerializer(serializers.Serializer):
    """Payload of a reset-password code request (email or phone form)."""
    form_type = serializers.CharField(default='email')
    username = serializers.CharField()
    email = serializers.CharField(allow_blank=True)
    phone = serializers.CharField(allow_blank=True)

    def create(self, attrs):
        # Collect every missing required field, then report them together.
        errors = []

        if not attrs.get('username'):
            errors.append(_('The {} cannot be empty').format(_('Username')))

        form_type = attrs.get('form_type', 'email')
        if form_type == 'phone':
            if not attrs.get('phone'):
                errors.append(_('The {} cannot be empty').format(_('Phone')))
        else:
            if not attrs.get('email'):
                errors.append(_('The {} cannot be empty').format(_('Email')))

        if errors:
            raise serializers.ValidationError(errors)
|
||||
|
||||
|
||||
class PasswordVerifySerializer(serializers.Serializer):
|
||||
password = EncryptedField()
|
||||
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
{% load i18n %}
|
||||
|
||||
<div style="width: 100%; text-align: center">
|
||||
<table style="margin: 0 auto; border: 1px solid #ccc; border-collapse: collapse; width: 60%">
|
||||
<tr style="background-color: #1ab394; color: white">
|
||||
<th style="height: 80px;">{{ title }}</th>
|
||||
</tr>
|
||||
<tr style="border: 1px solid #eee;">
|
||||
<td style="height: 50px;">{% trans 'Hello' %} {{ user.name }},</td>
|
||||
</tr>
|
||||
<tr style="border: 1px solid #eee">
|
||||
<td style="height: 50px;">{% trans 'Verify code' %}: <span style="font-weight: bold;">{{ code }}</span></td>
|
||||
</tr>
|
||||
<tr style="border: 1px solid #eee;">
|
||||
<td style="height: 30px;"> {% trans 'Copy the verification code to the Reset Password page to reset the password.' %} </td>
|
||||
</tr>
|
||||
<tr style="border: 1px solid #eee">
|
||||
<td style="height: 30px;">{% trans 'The validity period of the verification code is one minute' %}</td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
|
@ -32,7 +32,8 @@ urlpatterns = [
|
|||
path('mfa/verify/', api.MFAChallengeVerifyApi.as_view(), name='mfa-verify'),
|
||||
path('mfa/challenge/', api.MFAChallengeVerifyApi.as_view(), name='mfa-challenge'),
|
||||
path('mfa/select/', api.MFASendCodeApi.as_view(), name='mfa-select'),
|
||||
path('mfa/send-code/', api.MFASendCodeApi.as_view(), name='mfa-send-codej'),
|
||||
path('mfa/send-code/', api.MFASendCodeApi.as_view(), name='mfa-send-code'),
|
||||
path('password/reset-code/', api.UserResetPasswordSendCodeApi.as_view(), name='reset-password-code'),
|
||||
path('password/verify/', api.UserPasswordVerifyApi.as_view(), name='user-password-verify'),
|
||||
path('login-confirm-ticket/status/', api.TicketStatusApi.as_view(), name='login-confirm-ticket-status'),
|
||||
]
|
||||
|
|
|
@ -15,3 +15,5 @@ POST_CLEAR = 'post_clear'
|
|||
|
||||
POST_PREFIX = 'post'
|
||||
PRE_PREFIX = 'pre'
|
||||
|
||||
SKIP_SIGNAL = 'skip_signal'
|
||||
|
|
|
@ -6,6 +6,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
from django.utils.encoding import force_text
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from common.utils import signer, crypto
|
||||
from common.local import add_encrypted_field_set
|
||||
|
||||
|
||||
__all__ = [
|
||||
|
@ -149,6 +150,10 @@ class EncryptMixin:
|
|||
class EncryptTextField(EncryptMixin, models.TextField):
|
||||
description = _("Encrypt field using Secret Key")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
add_encrypted_field_set(self.verbose_name)
|
||||
|
||||
|
||||
class EncryptCharField(EncryptMixin, models.CharField):
|
||||
@staticmethod
|
||||
|
@ -163,6 +168,7 @@ class EncryptCharField(EncryptMixin, models.CharField):
|
|||
def __init__(self, *args, **kwargs):
|
||||
self.change_max_length(kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
add_encrypted_field_set(self.verbose_name)
|
||||
|
||||
def deconstruct(self):
|
||||
name, path, args, kwargs = super().deconstruct()
|
||||
|
@ -174,11 +180,15 @@ class EncryptCharField(EncryptMixin, models.CharField):
|
|||
|
||||
|
||||
class EncryptJsonDictTextField(EncryptMixin, JsonDictTextField):
    """Encrypted JSON-dict text field; registers its label for log masking."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Register the label so operate logs mask this field's values.
        add_encrypted_field_set(self.verbose_name)
|
||||
|
||||
|
||||
class EncryptJsonDictCharField(EncryptMixin, JsonDictCharField):
    """Encrypted JSON-dict char field; registers its label for log masking."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Register the label so operate logs mask this field's values.
        add_encrypted_field_set(self.verbose_name)
|
||||
|
||||
|
||||
class PortField(models.IntegerField):
|
||||
|
|
|
@ -19,6 +19,8 @@ from django.db.models import QuerySet
|
|||
from django.db.models.functions import Concat
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from ..const.signals import SKIP_SIGNAL
|
||||
|
||||
|
||||
class Choice(str):
|
||||
def __new__(cls, value, label=''): # `deepcopy` 的时候不会传 `label`
|
||||
|
@ -124,6 +126,9 @@ class JMSModel(JMSBaseModel):
|
|||
class Meta:
|
||||
abstract = True
|
||||
|
||||
def __str__(self):
|
||||
return str(self.id)
|
||||
|
||||
|
||||
def concated_display(name1, name2):
|
||||
return Concat(F(name1), Value('('), F(name2), Value(')'))
|
||||
|
@ -238,3 +243,14 @@ class MultiTableChildQueryset(QuerySet):
|
|||
self._batched_insert(objs, self.model._meta.local_fields, batch_size)
|
||||
|
||||
return objs
|
||||
|
||||
|
||||
def CASCADE_SIGNAL_SKIP(collector, field, sub_objs, using):
    """CASCADE variant that marks child objects to skip operate-log signals.

    Cascade-deleted children are system side effects; logging each one
    would confuse users, so flag them before delegating to normal CASCADE.
    """
    try:
        for obj in sub_objs:
            setattr(obj, SKIP_SIGNAL, True)
    # Narrowed from a bare except: best-effort tagging must never block
    # the delete, but SystemExit/KeyboardInterrupt should propagate.
    except Exception:
        pass

    CASCADE(collector, field, sub_objs, using)
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
from rest_framework import serializers
|
||||
|
||||
from common.utils import decrypt_password
|
||||
from common.local import add_encrypted_field_set
|
||||
|
||||
__all__ = [
|
||||
'ReadableHiddenField', 'EncryptedField'
|
||||
|
@ -32,6 +33,7 @@ class EncryptedField(serializers.CharField):
|
|||
write_only = True
|
||||
kwargs['write_only'] = write_only
|
||||
super().__init__(**kwargs)
|
||||
add_encrypted_field_set(self.label)
|
||||
|
||||
def to_internal_value(self, value):
|
||||
value = super().to_internal_value(value)
|
||||
|
|
|
@ -1,7 +1,13 @@
|
|||
from werkzeug.local import Local
|
||||
|
||||
thread_local = Local()
|
||||
encrypted_field_set = set()
|
||||
|
||||
|
||||
def _find(attr):
    """Return the thread-local attribute *attr*, or None if it is unset."""
    return getattr(thread_local, attr, None)
|
||||
|
||||
|
||||
def add_encrypted_field_set(label):
    """Register a field label whose values must be masked in operate logs."""
    if not label:
        return
    encrypted_field_set.add(str(label))
|
||||
|
|
|
@ -7,7 +7,7 @@ from rest_framework import permissions
|
|||
from rest_framework.request import Request
|
||||
|
||||
from common.exceptions import UserConfirmRequired
|
||||
from audits.utils import create_operate_log
|
||||
from audits.handler import create_or_update_operate_log
|
||||
from audits.models import OperateLog
|
||||
|
||||
__all__ = ["PermissionsMixin", "RecordViewLogMixin", "UserConfirmRequiredExceptionMixin"]
|
||||
|
@ -62,10 +62,18 @@ class RecordViewLogMixin:
|
|||
def list(self, request, *args, **kwargs):
|
||||
response = super().list(request, *args, **kwargs)
|
||||
resource = self.get_resource_display(request)
|
||||
create_operate_log(self.ACTION, self.model, resource)
|
||||
resource_type = self.model._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
self.ACTION, resource_type, force=True,
|
||||
resource=resource
|
||||
)
|
||||
return response
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
response = super().retrieve(request, *args, **kwargs)
|
||||
create_operate_log(self.ACTION, self.model, self.get_object())
|
||||
resource_type = self.model._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
self.ACTION, resource_type, force=True,
|
||||
resource=self.get_object()
|
||||
)
|
||||
return response
|
||||
|
|
|
@ -0,0 +1,428 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import datetime
|
||||
import inspect
|
||||
|
||||
from collections.abc import Iterable
|
||||
from functools import reduce, partial
|
||||
from itertools import groupby
|
||||
from uuid import UUID
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.db.models import QuerySet as DJQuerySet
|
||||
from elasticsearch import Elasticsearch
|
||||
from elasticsearch.helpers import bulk
|
||||
from elasticsearch.exceptions import RequestError, NotFoundError
|
||||
|
||||
from common.utils.common import lazyproperty
|
||||
from common.utils import get_logger
|
||||
from common.utils.timezone import local_now_date_display
|
||||
from common.exceptions import JMSException
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class InvalidElasticsearch(JMSException):
|
||||
default_code = 'invalid_elasticsearch'
|
||||
default_detail = _('Invalid elasticsearch config')
|
||||
|
||||
|
||||
class NotSupportElasticsearch8(JMSException):
|
||||
default_code = 'not_support_elasticsearch8'
|
||||
default_detail = _('Not Support Elasticsearch8')
|
||||
|
||||
|
||||
class ES(object):
|
||||
def __init__(self, config, properties, keyword_fields, exact_fields=None, match_fields=None):
|
||||
|
||||
self.config = config
|
||||
hosts = self.config.get('HOSTS')
|
||||
kwargs = self.config.get('OTHER', {})
|
||||
|
||||
ignore_verify_certs = kwargs.pop('IGNORE_VERIFY_CERTS', False)
|
||||
if ignore_verify_certs:
|
||||
kwargs['verify_certs'] = None
|
||||
self.es = Elasticsearch(hosts=hosts, max_retries=0, **kwargs)
|
||||
self.index_prefix = self.config.get('INDEX') or 'jumpserver'
|
||||
self.is_index_by_date = bool(self.config.get('INDEX_BY_DATE', False))
|
||||
|
||||
self.index = None
|
||||
self.query_index = None
|
||||
self.properties = properties
|
||||
self.exact_fields, self.match_fields, self.keyword_fields = set(), set(), set()
|
||||
|
||||
if isinstance(keyword_fields, Iterable):
|
||||
self.keyword_fields.update(keyword_fields)
|
||||
if isinstance(exact_fields, Iterable):
|
||||
self.exact_fields.update(exact_fields)
|
||||
if isinstance(match_fields, Iterable):
|
||||
self.match_fields.update(match_fields)
|
||||
|
||||
self.init_index()
|
||||
self.doc_type = self.config.get("DOC_TYPE") or '_doc'
|
||||
if self.is_new_index_type():
|
||||
self.doc_type = '_doc'
|
||||
self.exact_fields.update(self.keyword_fields)
|
||||
else:
|
||||
self.match_fields.update(self.keyword_fields)
|
||||
|
||||
def init_index(self):
|
||||
if self.is_index_by_date:
|
||||
date = local_now_date_display()
|
||||
self.index = '%s-%s' % (self.index_prefix, date)
|
||||
self.query_index = '%s-alias' % self.index_prefix
|
||||
else:
|
||||
self.index = self.config.get("INDEX") or 'jumpserver'
|
||||
self.query_index = self.config.get("INDEX") or 'jumpserver'
|
||||
|
||||
def is_new_index_type(self):
|
||||
if not self.ping(timeout=2):
|
||||
return False
|
||||
|
||||
info = self.es.info()
|
||||
version = info['version']['number'].split('.')[0]
|
||||
|
||||
if version == '8':
|
||||
raise NotSupportElasticsearch8
|
||||
|
||||
try:
|
||||
# 获取索引信息,如果没有定义,直接返回
|
||||
data = self.es.indices.get_mapping(self.index)
|
||||
except NotFoundError:
|
||||
return False
|
||||
|
||||
try:
|
||||
if version == '6':
|
||||
# 检测索引是不是新的类型 es6
|
||||
properties = data[self.index]['mappings']['data']['properties']
|
||||
else:
|
||||
# 检测索引是不是新的类型 es7 default index type: _doc
|
||||
properties = data[self.index]['mappings']['properties']
|
||||
|
||||
for keyword in self.keyword_fields:
|
||||
if not properties[keyword]['type'] == 'keyword':
|
||||
break
|
||||
else:
|
||||
return True
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
def pre_use_check(self):
|
||||
if not self.ping(timeout=3):
|
||||
raise InvalidElasticsearch
|
||||
self._ensure_index_exists()
|
||||
|
||||
def _ensure_index_exists(self):
|
||||
info = self.es.info()
|
||||
version = info['version']['number'].split('.')[0]
|
||||
if version == '6':
|
||||
mappings = {'mappings': {'data': {'properties': self.properties}}}
|
||||
else:
|
||||
mappings = {'mappings': {'properties': self.properties}}
|
||||
|
||||
if self.is_index_by_date:
|
||||
mappings['aliases'] = {
|
||||
self.query_index: {}
|
||||
}
|
||||
|
||||
try:
|
||||
self.es.indices.create(self.index, body=mappings)
|
||||
return
|
||||
except RequestError as e:
|
||||
if e.error == 'resource_already_exists_exception':
|
||||
logger.warning(e)
|
||||
else:
|
||||
logger.exception(e)
|
||||
|
||||
def make_data(self, data):
|
||||
return []
|
||||
|
||||
def save(self, **kwargs):
|
||||
data = self.make_data(kwargs)
|
||||
return self.es.index(index=self.index, doc_type=self.doc_type, body=data)
|
||||
|
||||
def bulk_save(self, command_set, raise_on_error=True):
|
||||
actions = []
|
||||
for command in command_set:
|
||||
data = dict(
|
||||
_index=self.index,
|
||||
_type=self.doc_type,
|
||||
_source=self.make_data(command),
|
||||
)
|
||||
actions.append(data)
|
||||
return bulk(self.es, actions, index=self.index, raise_on_error=raise_on_error)
|
||||
|
||||
def get(self, query: dict):
|
||||
item = None
|
||||
data = self.filter(query, size=1)
|
||||
if len(data) >= 1:
|
||||
item = data[0]
|
||||
return item
|
||||
|
||||
def filter(self, query: dict, from_=None, size=None, sort=None):
|
||||
try:
|
||||
data = self._filter(query, from_, size, sort)
|
||||
except Exception as e:
|
||||
logger.error('ES filter error: {}'.format(e))
|
||||
data = []
|
||||
return data
|
||||
|
||||
def _filter(self, query: dict, from_=None, size=None, sort=None):
|
||||
body = self.get_query_body(**query)
|
||||
|
||||
data = self.es.search(
|
||||
index=self.query_index, doc_type=self.doc_type, body=body,
|
||||
from_=from_, size=size, sort=sort
|
||||
)
|
||||
source_data = []
|
||||
for item in data['hits']['hits']:
|
||||
if item:
|
||||
item['_source'].update({'es_id': item['_id']})
|
||||
source_data.append(item['_source'])
|
||||
|
||||
return source_data
|
||||
|
||||
def count(self, **query):
|
||||
try:
|
||||
body = self.get_query_body(**query)
|
||||
data = self.es.count(index=self.query_index, doc_type=self.doc_type, body=body)
|
||||
count = data["count"]
|
||||
except Exception as e:
|
||||
logger.error('ES count error: {}'.format(e))
|
||||
count = 0
|
||||
return count
|
||||
|
||||
def __getattr__(self, item):
|
||||
return getattr(self.es, item)
|
||||
|
||||
def all(self):
|
||||
"""返回所有数据"""
|
||||
raise NotImplementedError("Not support")
|
||||
|
||||
def ping(self, timeout=None):
|
||||
try:
|
||||
return self.es.ping(request_timeout=timeout)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def handler_time_field(data):
|
||||
datetime__gte = data.get('datetime__gte')
|
||||
datetime__lte = data.get('datetime__lte')
|
||||
datetime_range = {}
|
||||
|
||||
if datetime__gte:
|
||||
if isinstance(datetime__gte, datetime.datetime):
|
||||
datetime__gte = datetime__gte.strftime('%Y-%m-%d %H:%M:%S')
|
||||
datetime_range['gte'] = datetime__gte
|
||||
if datetime__lte:
|
||||
if isinstance(datetime__lte, datetime.datetime):
|
||||
datetime__lte = datetime__lte.strftime('%Y-%m-%d %H:%M:%S')
|
||||
datetime_range['lte'] = datetime__lte
|
||||
return 'datetime', datetime_range
|
||||
|
||||
def get_query_body(self, **kwargs):
|
||||
new_kwargs = {}
|
||||
for k, v in kwargs.items():
|
||||
if isinstance(v, UUID):
|
||||
v = str(v)
|
||||
if k == 'pk':
|
||||
k = 'id'
|
||||
new_kwargs[k] = v
|
||||
kwargs = new_kwargs
|
||||
|
||||
index_in_field = 'id__in'
|
||||
exact_fields = self.exact_fields
|
||||
match_fields = self.match_fields
|
||||
|
||||
match = {}
|
||||
exact = {}
|
||||
index = {}
|
||||
|
||||
if index_in_field in kwargs:
|
||||
index['values'] = kwargs[index_in_field]
|
||||
|
||||
for k, v in kwargs.items():
|
||||
if k in exact_fields:
|
||||
exact[k] = v
|
||||
elif k in match_fields:
|
||||
match[k] = v
|
||||
|
||||
# 处理时间
|
||||
time_field_name, time_range = self.handler_time_field(kwargs)
|
||||
|
||||
# 处理组织
|
||||
should = []
|
||||
org_id = match.get('org_id')
|
||||
|
||||
real_default_org_id = '00000000-0000-0000-0000-000000000002'
|
||||
root_org_id = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
if org_id == root_org_id:
|
||||
match.pop('org_id')
|
||||
elif org_id in (real_default_org_id, ''):
|
||||
match.pop('org_id')
|
||||
should.append({
|
||||
'bool': {
|
||||
'must_not': [
|
||||
{
|
||||
'wildcard': {'org_id': '*'}
|
||||
}
|
||||
]}
|
||||
})
|
||||
should.append({'match': {'org_id': real_default_org_id}})
|
||||
|
||||
# 构建 body
|
||||
body = {
|
||||
'query': {
|
||||
'bool': {
|
||||
'must': [
|
||||
{'match': {k: v}} for k, v in match.items()
|
||||
],
|
||||
'should': should,
|
||||
'filter': [
|
||||
{
|
||||
'term': {k: v}
|
||||
} for k, v in exact.items()
|
||||
] + [
|
||||
{
|
||||
'range': {
|
||||
time_field_name: time_range
|
||||
}
|
||||
}
|
||||
] + [
|
||||
{
|
||||
'ids': {k: v}
|
||||
} for k, v in index.items()
|
||||
]
|
||||
}
|
||||
},
|
||||
}
|
||||
return body
|
||||
|
||||
|
||||
class QuerySet(DJQuerySet):
|
||||
default_days_ago = 7
|
||||
max_result_window = 10000
|
||||
|
||||
def __init__(self, es_instance):
|
||||
self._method_calls = []
|
||||
self._slice = None # (from_, size)
|
||||
self._storage = es_instance
|
||||
|
||||
# 命令列表模糊搜索时报错
|
||||
super().__init__()
|
||||
|
||||
@lazyproperty
|
||||
def _grouped_method_calls(self):
|
||||
_method_calls = {k: list(v) for k, v in groupby(self._method_calls, lambda x: x[0])}
|
||||
return _method_calls
|
||||
|
||||
@lazyproperty
|
||||
def _filter_kwargs(self):
|
||||
_method_calls = self._grouped_method_calls
|
||||
filter_calls = _method_calls.get('filter')
|
||||
if not filter_calls:
|
||||
return {}
|
||||
names, multi_args, multi_kwargs = zip(*filter_calls)
|
||||
kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})
|
||||
|
||||
striped_kwargs = {}
|
||||
for k, v in kwargs.items():
|
||||
k = k.replace('__exact', '')
|
||||
k = k.replace('__startswith', '')
|
||||
k = k.replace('__icontains', '')
|
||||
striped_kwargs[k] = v
|
||||
return striped_kwargs
|
||||
|
||||
@lazyproperty
|
||||
def _sort(self):
|
||||
order_by = self._grouped_method_calls.get('order_by')
|
||||
if order_by:
|
||||
for call in reversed(order_by):
|
||||
fields = call[1]
|
||||
if fields:
|
||||
field = fields[-1]
|
||||
|
||||
if field.startswith('-'):
|
||||
direction = 'desc'
|
||||
else:
|
||||
direction = 'asc'
|
||||
field = field.lstrip('-+')
|
||||
sort = f'{field}:{direction}'
|
||||
return sort
|
||||
|
||||
def __execute(self):
|
||||
_filter_kwargs = self._filter_kwargs
|
||||
_sort = self._sort
|
||||
from_, size = self._slice or (None, None)
|
||||
data = self._storage.filter(_filter_kwargs, from_=from_, size=size, sort=_sort)
|
||||
return self.model.from_multi_dict(data)
|
||||
|
||||
def __stage_method_call(self, item, *args, **kwargs):
|
||||
_clone = self.__clone()
|
||||
_clone._method_calls.append((item, args, kwargs))
|
||||
return _clone
|
||||
|
||||
def __clone(self):
|
||||
uqs = QuerySet(self._storage)
|
||||
uqs._method_calls = self._method_calls.copy()
|
||||
uqs._slice = self._slice
|
||||
uqs.model = self.model
|
||||
return uqs
|
||||
|
||||
def get(self, **kwargs):
|
||||
kwargs.update(self._filter_kwargs)
|
||||
return self._storage.get(kwargs)
|
||||
|
||||
def count(self, limit_to_max_result_window=True):
|
||||
filter_kwargs = self._filter_kwargs
|
||||
count = self._storage.count(**filter_kwargs)
|
||||
if limit_to_max_result_window:
|
||||
count = min(count, self.max_result_window)
|
||||
return count
|
||||
|
||||
def __getattribute__(self, item):
|
||||
if any((
|
||||
item.startswith('__'),
|
||||
item in QuerySet.__dict__,
|
||||
)):
|
||||
return object.__getattribute__(self, item)
|
||||
|
||||
origin_attr = object.__getattribute__(self, item)
|
||||
if not inspect.ismethod(origin_attr):
|
||||
return origin_attr
|
||||
|
||||
attr = partial(self.__stage_method_call, item)
|
||||
return attr
|
||||
|
||||
def __getitem__(self, item):
|
||||
max_window = self.max_result_window
|
||||
if isinstance(item, slice):
|
||||
if self._slice is None:
|
||||
clone = self.__clone()
|
||||
from_ = item.start or 0
|
||||
if item.stop is None:
|
||||
size = self.max_result_window - from_
|
||||
else:
|
||||
size = item.stop - from_
|
||||
|
||||
if from_ + size > max_window:
|
||||
if from_ >= max_window:
|
||||
from_ = max_window
|
||||
size = 0
|
||||
else:
|
||||
size = max_window - from_
|
||||
clone._slice = (from_, size)
|
||||
return clone
|
||||
return self.__execute()[item]
|
||||
|
||||
def __repr__(self):
|
||||
return self.__execute().__repr__()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__execute())
|
||||
|
||||
def __len__(self):
|
||||
return self.count()
|
|
@ -1,2 +1 @@
|
|||
from .endpoint import SMS, BACKENDS
|
||||
from .utils import SendAndVerifySMSUtil
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.exceptions import JMSException
|
||||
|
||||
|
||||
class CodeExpired(JMSException):
|
||||
default_code = 'verify_code_expired'
|
||||
default_detail = _('The verification code has expired. Please resend it')
|
||||
|
||||
|
||||
class CodeError(JMSException):
|
||||
default_code = 'verify_code_error'
|
||||
default_detail = _('The verification code is incorrect')
|
||||
|
||||
|
||||
class CodeSendTooFrequently(JMSException):
|
||||
default_code = 'code_send_too_frequently'
|
||||
default_detail = _('Please wait {} seconds before sending')
|
||||
|
||||
def __init__(self, ttl):
|
||||
super().__init__(detail=self.default_detail.format(ttl))
|
|
@ -1,90 +0,0 @@
|
|||
import random
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from .endpoint import SMS
|
||||
from common.utils import get_logger
|
||||
from common.exceptions import JMSException
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class CodeExpired(JMSException):
|
||||
default_code = 'verify_code_expired'
|
||||
default_detail = _('The verification code has expired. Please resend it')
|
||||
|
||||
|
||||
class CodeError(JMSException):
|
||||
default_code = 'verify_code_error'
|
||||
default_detail = _('The verification code is incorrect')
|
||||
|
||||
|
||||
class CodeSendTooFrequently(JMSException):
|
||||
default_code = 'code_send_too_frequently'
|
||||
default_detail = _('Please wait {} seconds before sending')
|
||||
|
||||
def __init__(self, ttl):
|
||||
super().__init__(detail=self.default_detail.format(ttl))
|
||||
|
||||
|
||||
class SendAndVerifySMSUtil:
|
||||
KEY_TMPL = 'auth-verify-code-{}'
|
||||
TIMEOUT = 60
|
||||
|
||||
def __init__(self, phone, key_suffix=None, timeout=None):
|
||||
self.phone = phone
|
||||
self.code = ''
|
||||
self.timeout = timeout or self.TIMEOUT
|
||||
self.key_suffix = key_suffix or str(phone)
|
||||
self.key = self.KEY_TMPL.format(self.key_suffix)
|
||||
|
||||
def gen_and_send(self):
|
||||
"""
|
||||
生成,保存,发送
|
||||
"""
|
||||
ttl = self.ttl()
|
||||
if ttl > 0:
|
||||
logger.error('Send sms too frequently, delay {}'.format(ttl))
|
||||
raise CodeSendTooFrequently(ttl)
|
||||
|
||||
try:
|
||||
code = self.generate()
|
||||
self.send(code)
|
||||
except JMSException:
|
||||
self.clear()
|
||||
raise
|
||||
|
||||
def generate(self):
|
||||
code = ''.join(random.sample('0123456789', 4))
|
||||
self.code = code
|
||||
return code
|
||||
|
||||
def clear(self):
|
||||
cache.delete(self.key)
|
||||
|
||||
def send(self, code):
|
||||
"""
|
||||
发送信息的方法,如果有错误直接抛出 api 异常
|
||||
"""
|
||||
sms = SMS()
|
||||
sms.send_verify_code(self.phone, code)
|
||||
cache.set(self.key, self.code, self.timeout)
|
||||
logger.info(f'Send sms verify code to {self.phone}: {code}')
|
||||
|
||||
def verify(self, code):
|
||||
right = cache.get(self.key)
|
||||
if not right:
|
||||
raise CodeExpired
|
||||
|
||||
if right != code:
|
||||
raise CodeError
|
||||
|
||||
self.clear()
|
||||
return True
|
||||
|
||||
def ttl(self):
|
||||
return cache.ttl(self.key)
|
||||
|
||||
def get_code(self):
|
||||
return cache.get(self.key)
|
|
@ -0,0 +1,94 @@
|
|||
from django.core.cache import cache
|
||||
from django.conf import settings
|
||||
from django.core.mail import send_mail
|
||||
from celery import shared_task
|
||||
|
||||
from common.sdk.sms.exceptions import CodeError, CodeExpired, CodeSendTooFrequently
|
||||
from common.sdk.sms.endpoint import SMS
|
||||
from common.exceptions import JMSException
|
||||
from common.utils.random import random_string
|
||||
from common.utils import get_logger
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task
|
||||
def send_async(sender):
|
||||
sender.gen_and_send()
|
||||
|
||||
|
||||
class SendAndVerifyCodeUtil(object):
|
||||
KEY_TMPL = 'auth-verify-code-{}'
|
||||
|
||||
def __init__(self, target, code=None, key=None, backend='email', timeout=60, **kwargs):
|
||||
self.target = target
|
||||
self.code = code
|
||||
self.timeout = timeout
|
||||
self.backend = backend
|
||||
self.key = key or self.KEY_TMPL.format(target)
|
||||
self.other_args = kwargs
|
||||
|
||||
def gen_and_send_async(self):
|
||||
return send_async.delay(self)
|
||||
|
||||
def gen_and_send(self):
|
||||
ttl = self.__ttl()
|
||||
if ttl > 0:
|
||||
logger.error('Send sms too frequently, delay {}'.format(ttl))
|
||||
raise CodeSendTooFrequently(ttl)
|
||||
|
||||
try:
|
||||
if not self.code:
|
||||
self.code = self.__generate()
|
||||
self.__send(self.code)
|
||||
except JMSException:
|
||||
self.__clear()
|
||||
raise
|
||||
|
||||
def verify(self, code):
|
||||
right = cache.get(self.key)
|
||||
if not right:
|
||||
raise CodeExpired
|
||||
|
||||
if right != code:
|
||||
raise CodeError
|
||||
|
||||
self.__clear()
|
||||
return True
|
||||
|
||||
def __clear(self):
|
||||
cache.delete(self.key)
|
||||
|
||||
def __ttl(self):
|
||||
return cache.ttl(self.key)
|
||||
|
||||
def __get_code(self):
|
||||
return cache.get(self.key)
|
||||
|
||||
def __generate(self):
|
||||
code = random_string(4, lower=False, upper=False)
|
||||
self.code = code
|
||||
return code
|
||||
|
||||
def __send_with_sms(self):
|
||||
sms = SMS()
|
||||
sms.send_verify_code(self.target, self.code)
|
||||
|
||||
def __send_with_email(self):
|
||||
subject = self.other_args.get('subject')
|
||||
message = self.other_args.get('message')
|
||||
from_email = settings.EMAIL_FROM or settings.EMAIL_HOST_USER
|
||||
send_mail(subject, message, from_email, [self.target], html_message=message)
|
||||
|
||||
def __send(self, code):
|
||||
"""
|
||||
发送信息的方法,如果有错误直接抛出 api 异常
|
||||
"""
|
||||
if self.backend == 'sms':
|
||||
self.__send_with_sms()
|
||||
else:
|
||||
self.__send_with_email()
|
||||
|
||||
cache.set(self.key, self.code, self.timeout)
|
||||
logger.info(f'Send verify code to {self.target}: {code}')
|
|
@ -227,6 +227,10 @@ class Config(dict):
|
|||
'AUTH_CUSTOM': False,
|
||||
'AUTH_CUSTOM_FILE_MD5': '',
|
||||
|
||||
# Custom Config
|
||||
'MFA_CUSTOM': False,
|
||||
'MFA_CUSTOM_FILE_MD5': '',
|
||||
|
||||
# Auth LDAP settings
|
||||
'AUTH_LDAP': False,
|
||||
'AUTH_LDAP_SERVER_URI': 'ldap://localhost:389',
|
||||
|
@ -416,6 +420,9 @@ class Config(dict):
|
|||
'TERMINAL_HOST_KEY': '',
|
||||
'TERMINAL_TELNET_REGEX': '',
|
||||
'TERMINAL_COMMAND_STORAGE': {},
|
||||
# Luna 页面
|
||||
# 默认图形化分辨率
|
||||
'TERMINAL_GRAPHICAL_RESOLUTION': 'Auto',
|
||||
# 未来废弃(目前迁移会用)
|
||||
'TERMINAL_RDP_ADDR': '',
|
||||
# 保留(Luna还在用)
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
from django.conf import settings
|
||||
from rest_framework.pagination import LimitOffsetPagination
|
||||
|
||||
|
||||
class MaxLimitOffsetPagination(LimitOffsetPagination):
|
||||
max_limit = settings.MAX_LIMIT_PER_PAGE or 100
|
|
@ -51,7 +51,6 @@ AUTH_LDAP_SYNC_CRONTAB = CONFIG.AUTH_LDAP_SYNC_CRONTAB
|
|||
AUTH_LDAP_SYNC_ORG_ID = CONFIG.AUTH_LDAP_SYNC_ORG_ID
|
||||
AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS = CONFIG.AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
# 认证 OpenID 配置参数
|
||||
# 参考: https://django-oidc-rp.readthedocs.io/en/stable/settings.html
|
||||
|
@ -180,7 +179,6 @@ if CONNECTION_TOKEN_EXPIRATION < 5 * 60:
|
|||
# 最少5分钟
|
||||
CONNECTION_TOKEN_EXPIRATION = 5 * 60
|
||||
|
||||
|
||||
RBAC_BACKEND = 'rbac.backends.RBACBackend'
|
||||
AUTH_BACKEND_MODEL = 'authentication.backends.base.JMSModelBackend'
|
||||
AUTH_BACKEND_PUBKEY = 'authentication.backends.pubkey.PublicKeyAuthBackend'
|
||||
|
@ -203,7 +201,7 @@ AUTHENTICATION_BACKENDS = [
|
|||
# 只做权限校验
|
||||
RBAC_BACKEND,
|
||||
# 密码形式
|
||||
AUTH_BACKEND_MODEL, AUTH_BACKEND_PUBKEY, AUTH_BACKEND_LDAP, AUTH_BACKEND_RADIUS,
|
||||
AUTH_BACKEND_MODEL, AUTH_BACKEND_PUBKEY, AUTH_BACKEND_LDAP, AUTH_BACKEND_RADIUS,
|
||||
# 跳转形式
|
||||
AUTH_BACKEND_CAS, AUTH_BACKEND_OIDC_PASSWORD, AUTH_BACKEND_OIDC_CODE, AUTH_BACKEND_SAML2,
|
||||
AUTH_BACKEND_OAUTH2,
|
||||
|
@ -236,7 +234,22 @@ if AUTH_CUSTOM and AUTH_CUSTOM_FILE_MD5 == get_file_md5(AUTH_CUSTOM_FILE_PATH):
|
|||
# 自定义认证模块
|
||||
AUTHENTICATION_BACKENDS.append(AUTH_BACKEND_CUSTOM)
|
||||
|
||||
AUTHENTICATION_BACKENDS_THIRD_PARTY = [AUTH_BACKEND_OIDC_CODE, AUTH_BACKEND_CAS, AUTH_BACKEND_SAML2, AUTH_BACKEND_OAUTH2]
|
||||
MFA_BACKEND_OTP = 'authentication.mfa.otp.MFAOtp'
|
||||
MFA_BACKEND_RADIUS = 'authentication.mfa.radius.MFARadius'
|
||||
MFA_BACKEND_SMS = 'authentication.mfa.sms.MFASms'
|
||||
MFA_BACKEND_CUSTOM = 'authentication.mfa.custom.MFACustom'
|
||||
|
||||
MFA_BACKENDS = [MFA_BACKEND_OTP, MFA_BACKEND_RADIUS, MFA_BACKEND_SMS]
|
||||
|
||||
MFA_CUSTOM = CONFIG.MFA_CUSTOM
|
||||
MFA_CUSTOM_FILE_MD5 = CONFIG.MFA_CUSTOM_FILE_MD5
|
||||
MFA_CUSTOM_FILE_PATH = os.path.join(PROJECT_DIR, 'data', 'mfa', 'main.py')
|
||||
if MFA_CUSTOM and MFA_CUSTOM_FILE_MD5 == get_file_md5(MFA_CUSTOM_FILE_PATH):
|
||||
# 自定义多因子认证模块
|
||||
MFA_BACKENDS.append(MFA_BACKEND_CUSTOM)
|
||||
|
||||
AUTHENTICATION_BACKENDS_THIRD_PARTY = [AUTH_BACKEND_OIDC_CODE, AUTH_BACKEND_CAS, AUTH_BACKEND_SAML2,
|
||||
AUTH_BACKEND_OAUTH2]
|
||||
ONLY_ALLOW_EXIST_USER_AUTH = CONFIG.ONLY_ALLOW_EXIST_USER_AUTH
|
||||
ONLY_ALLOW_AUTH_FROM_SOURCE = CONFIG.ONLY_ALLOW_AUTH_FROM_SOURCE
|
||||
|
||||
|
|
|
@ -19,6 +19,16 @@ def exist_or_default(path, default):
|
|||
return path
|
||||
|
||||
|
||||
def parse_sentinels_host(sentinels_host):
|
||||
service_name, sentinels = None, None
|
||||
try:
|
||||
service_name, hosts = sentinels_host.split('/', 1)
|
||||
sentinels = [tuple(h.split(':', 1)) for h in hosts.split(',')]
|
||||
except Exception:
|
||||
pass
|
||||
return service_name, sentinels
|
||||
|
||||
|
||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||
VERSION = const.VERSION
|
||||
BASE_DIR = const.BASE_DIR
|
||||
|
@ -276,26 +286,53 @@ REDIS_SSL_CA = exist_or_default(os.path.join(CERTS_DIR, 'redis_ca.pem'), None)
|
|||
REDIS_SSL_CA = exist_or_default(os.path.join(CERTS_DIR, 'redis_ca.crt'), REDIS_SSL_CA)
|
||||
REDIS_SSL_REQUIRED = 'none'
|
||||
REDIS_USE_SSL = CONFIG.REDIS_USE_SSL
|
||||
REDIS_PROTOCOL = 'rediss' if REDIS_USE_SSL else 'redis'
|
||||
# Cache use sentinel
|
||||
REDIS_SENTINEL_HOSTS = CONFIG.REDIS_SENTINEL_HOSTS
|
||||
REDIS_SENTINEL_SERVICE_NAME, REDIS_SENTINELS = parse_sentinels_host(REDIS_SENTINEL_HOSTS)
|
||||
REDIS_SENTINEL_PASSWORD = CONFIG.REDIS_SENTINEL_PASSWORD
|
||||
if CONFIG.REDIS_SENTINEL_SOCKET_TIMEOUT:
|
||||
REDIS_SENTINEL_SOCKET_TIMEOUT = float(CONFIG.REDIS_SENTINEL_SOCKET_TIMEOUT)
|
||||
else:
|
||||
REDIS_SENTINEL_SOCKET_TIMEOUT = None
|
||||
|
||||
REDIS_LOCATION_NO_DB = '%(protocol)s://:%(password)s@%(host)s:%(port)s/{}' % {
|
||||
'protocol': 'rediss' if REDIS_USE_SSL else 'redis',
|
||||
'password': CONFIG.REDIS_PASSWORD,
|
||||
'host': CONFIG.REDIS_HOST,
|
||||
'port': CONFIG.REDIS_PORT,
|
||||
# Cache config
|
||||
REDIS_OPTIONS = {
|
||||
"REDIS_CLIENT_KWARGS": {
|
||||
"health_check_interval": 30
|
||||
},
|
||||
"CONNECTION_POOL_KWARGS": {
|
||||
'ssl_cert_reqs': REDIS_SSL_REQUIRED,
|
||||
"ssl_keyfile": REDIS_SSL_KEY,
|
||||
"ssl_certfile": REDIS_SSL_CERT,
|
||||
"ssl_ca_certs": REDIS_SSL_CA
|
||||
} if REDIS_USE_SSL else {}
|
||||
}
|
||||
|
||||
if REDIS_SENTINEL_SERVICE_NAME and REDIS_SENTINELS:
|
||||
REDIS_LOCATION_NO_DB = "%(protocol)s://%(service_name)s/{}" % {
|
||||
'protocol': REDIS_PROTOCOL, 'service_name': REDIS_SENTINEL_SERVICE_NAME,
|
||||
}
|
||||
REDIS_OPTIONS.update({
|
||||
'CLIENT_CLASS': 'django_redis.client.SentinelClient',
|
||||
'SENTINELS': REDIS_SENTINELS, 'PASSWORD': CONFIG.REDIS_PASSWORD,
|
||||
'SENTINEL_KWARGS': {
|
||||
'password': REDIS_SENTINEL_PASSWORD,
|
||||
'socket_timeout': REDIS_SENTINEL_SOCKET_TIMEOUT
|
||||
}
|
||||
})
|
||||
DJANGO_REDIS_CONNECTION_FACTORY = 'django_redis.pool.SentinelConnectionFactory'
|
||||
else:
|
||||
REDIS_LOCATION_NO_DB = '%(protocol)s://:%(password)s@%(host)s:%(port)s/{}' % {
|
||||
'protocol': REDIS_PROTOCOL, 'password': CONFIG.REDIS_PASSWORD,
|
||||
'host': CONFIG.REDIS_HOST, 'port': CONFIG.REDIS_PORT,
|
||||
}
|
||||
|
||||
|
||||
REDIS_CACHE_DEFAULT = {
|
||||
'BACKEND': 'redis_lock.django_cache.RedisCache',
|
||||
'LOCATION': REDIS_LOCATION_NO_DB.format(CONFIG.REDIS_DB_CACHE),
|
||||
'OPTIONS': {
|
||||
"REDIS_CLIENT_KWARGS": {"health_check_interval": 30},
|
||||
"CONNECTION_POOL_KWARGS": {
|
||||
'ssl_cert_reqs': REDIS_SSL_REQUIRED,
|
||||
"ssl_keyfile": REDIS_SSL_KEY,
|
||||
"ssl_certfile": REDIS_SSL_CERT,
|
||||
"ssl_ca_certs": REDIS_SSL_CA
|
||||
} if REDIS_USE_SSL else {}
|
||||
}
|
||||
'OPTIONS': REDIS_OPTIONS
|
||||
}
|
||||
REDIS_CACHE_SESSION = dict(REDIS_CACHE_DEFAULT)
|
||||
REDIS_CACHE_SESSION['LOCATION'] = REDIS_LOCATION_NO_DB.format(CONFIG.REDIS_DB_SESSION)
|
||||
|
|
|
@ -81,6 +81,9 @@ TERMINAL_HOST_KEY = CONFIG.TERMINAL_HOST_KEY
|
|||
TERMINAL_HEADER_TITLE = CONFIG.TERMINAL_HEADER_TITLE
|
||||
TERMINAL_TELNET_REGEX = CONFIG.TERMINAL_TELNET_REGEX
|
||||
|
||||
# 默认图形化分辨率
|
||||
TERMINAL_GRAPHICAL_RESOLUTION = CONFIG.TERMINAL_GRAPHICAL_RESOLUTION
|
||||
|
||||
# Asset user auth external backend, default AuthBook backend
|
||||
BACKEND_ASSET_USER_AUTH_VAULT = False
|
||||
|
||||
|
@ -178,5 +181,9 @@ HELP_SUPPORT_URL = CONFIG.HELP_SUPPORT_URL
|
|||
SESSION_RSA_PRIVATE_KEY_NAME = 'jms_private_key'
|
||||
SESSION_RSA_PUBLIC_KEY_NAME = 'jms_public_key'
|
||||
|
||||
OPERATE_LOG_ELASTICSEARCH_CONFIG = CONFIG.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
|
||||
MAX_LIMIT_PER_PAGE = CONFIG.MAX_LIMIT_PER_PAGE
|
||||
|
||||
# Magnus DB Port
|
||||
MAGNUS_PORTS = CONFIG.MAGNUS_PORTS
|
||||
|
|
|
@ -4,8 +4,9 @@ import os
|
|||
import ssl
|
||||
|
||||
from .base import (
|
||||
REDIS_SSL_CA, REDIS_SSL_CERT, REDIS_SSL_KEY,
|
||||
REDIS_SSL_REQUIRED, REDIS_USE_SSL
|
||||
REDIS_SSL_CA, REDIS_SSL_CERT, REDIS_SSL_KEY, REDIS_SSL_REQUIRED, REDIS_USE_SSL,
|
||||
REDIS_SENTINEL_SERVICE_NAME, REDIS_SENTINELS, REDIS_SENTINEL_PASSWORD,
|
||||
REDIS_SENTINEL_SOCKET_TIMEOUT
|
||||
)
|
||||
from ..const import CONFIG, PROJECT_DIR
|
||||
|
||||
|
@ -46,7 +47,7 @@ REST_FRAMEWORK = {
|
|||
'SEARCH_PARAM': "search",
|
||||
'DATETIME_FORMAT': '%Y/%m/%d %H:%M:%S %z',
|
||||
'DATETIME_INPUT_FORMATS': ['%Y/%m/%d %H:%M:%S %z', 'iso-8601', '%Y-%m-%d %H:%M:%S %z'],
|
||||
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
|
||||
'DEFAULT_PAGINATION_CLASS': 'jumpserver.rewriting.pagination.MaxLimitOffsetPagination',
|
||||
'EXCEPTION_HANDLER': 'common.drf.exc_handlers.common_exception_handler',
|
||||
}
|
||||
|
||||
|
@ -90,16 +91,28 @@ else:
|
|||
if REDIS_SSL_CERT and REDIS_SSL_KEY:
|
||||
redis_ssl.load_cert_chain(REDIS_SSL_CERT, REDIS_SSL_KEY)
|
||||
|
||||
REDIS_HOST = {
|
||||
'db': CONFIG.REDIS_DB_WS,
|
||||
'password': CONFIG.REDIS_PASSWORD or None,
|
||||
'ssl': redis_ssl,
|
||||
}
|
||||
|
||||
if REDIS_SENTINEL_SERVICE_NAME and REDIS_SENTINELS:
|
||||
REDIS_HOST['sentinels'] = REDIS_SENTINELS
|
||||
REDIS_HOST['master_name'] = REDIS_SENTINEL_SERVICE_NAME
|
||||
REDIS_HOST['sentinel_kwargs'] = {
|
||||
'password': REDIS_SENTINEL_PASSWORD,
|
||||
'socket_timeout': REDIS_SENTINEL_SOCKET_TIMEOUT
|
||||
}
|
||||
else:
|
||||
REDIS_HOST['address'] = (CONFIG.REDIS_HOST, CONFIG.REDIS_PORT)
|
||||
|
||||
|
||||
CHANNEL_LAYERS = {
|
||||
'default': {
|
||||
'BACKEND': 'common.cache.RedisChannelLayer',
|
||||
'CONFIG': {
|
||||
"hosts": [{
|
||||
'address': (CONFIG.REDIS_HOST, CONFIG.REDIS_PORT),
|
||||
'db': CONFIG.REDIS_DB_WS,
|
||||
'password': CONFIG.REDIS_PASSWORD or None,
|
||||
'ssl': redis_ssl
|
||||
}],
|
||||
"hosts": [REDIS_HOST],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -109,13 +122,28 @@ ASGI_APPLICATION = 'jumpserver.routing.application'
|
|||
CELERY_LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'celery')
|
||||
|
||||
# Celery using redis as broker
|
||||
CELERY_BROKER_URL = '%(protocol)s://:%(password)s@%(host)s:%(port)s/%(db)s' % {
|
||||
'protocol': 'rediss' if REDIS_USE_SSL else 'redis',
|
||||
'password': CONFIG.REDIS_PASSWORD,
|
||||
'host': CONFIG.REDIS_HOST,
|
||||
'port': CONFIG.REDIS_PORT,
|
||||
'db': CONFIG.REDIS_DB_CELERY,
|
||||
}
|
||||
CELERY_BROKER_URL_FORMAT = '%(protocol)s://:%(password)s@%(host)s:%(port)s/%(db)s'
|
||||
if REDIS_SENTINEL_SERVICE_NAME and REDIS_SENTINELS:
|
||||
CELERY_BROKER_URL = ';'.join([CELERY_BROKER_URL_FORMAT % {
|
||||
'protocol': 'sentinel', 'password': CONFIG.REDIS_PASSWORD,
|
||||
'host': item[0], 'port': item[1], 'db': CONFIG.REDIS_DB_CELERY
|
||||
} for item in REDIS_SENTINELS])
|
||||
SENTINEL_OPTIONS = {
|
||||
'master_name': REDIS_SENTINEL_SERVICE_NAME,
|
||||
'sentinel_kwargs': {
|
||||
'password': REDIS_SENTINEL_PASSWORD,
|
||||
'socket_timeout': REDIS_SENTINEL_SOCKET_TIMEOUT
|
||||
}
|
||||
}
|
||||
CELERY_BROKER_TRANSPORT_OPTIONS = CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = SENTINEL_OPTIONS
|
||||
else:
|
||||
CELERY_BROKER_URL = CELERY_BROKER_URL_FORMAT % {
|
||||
'protocol': 'rediss' if REDIS_USE_SSL else 'redis',
|
||||
'password': CONFIG.REDIS_PASSWORD,
|
||||
'host': CONFIG.REDIS_HOST,
|
||||
'port': CONFIG.REDIS_PORT,
|
||||
'db': CONFIG.REDIS_DB_CELERY,
|
||||
}
|
||||
CELERY_TASK_SERIALIZER = 'pickle'
|
||||
CELERY_RESULT_SERIALIZER = 'pickle'
|
||||
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:7522cd9a7e7853d078c81006cea7f6dbe4fb9d51ae7c6dddd50e8471536d4c0d
|
||||
size 133026
|
||||
oid sha256:b21e8af3ad29606b9ff36bbf5da8dd03a041b25e046c3cedabb8650390c0a4c7
|
||||
size 132358
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,3 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:a1e0b5e70491c6228017987091e46d14ccde03b6e56bfb330d1604240c6b3d09
|
||||
size 109554
|
||||
oid sha256:f9dbc8ef62e3746cebd07177934c7d68c373ff0ecceff4a83aeeb9e96829359b
|
||||
size 108613
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,6 +1,4 @@
|
|||
from django.conf import settings
|
||||
|
||||
from common.sdk.sms.alibaba import AlibabaSMS as Client
|
||||
from common.sdk.sms.endpoint import SMS
|
||||
from .base import BackendBase
|
||||
|
||||
|
||||
|
@ -9,13 +7,7 @@ class SMS(BackendBase):
|
|||
is_enable_field_in_settings = 'SMS_ENABLED'
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
暂时只对接阿里,之后再扩展
|
||||
"""
|
||||
self.client = Client(
|
||||
access_key_id=settings.ALIBABA_ACCESS_KEY_ID,
|
||||
access_key_secret=settings.ALIBABA_ACCESS_KEY_SECRET
|
||||
)
|
||||
self.client = SMS()
|
||||
|
||||
def send_msg(self, users, sign_name: str, template_code: str, template_param: dict):
|
||||
accounts, __, __ = self.get_accounts(users)
|
||||
|
|
|
@ -43,11 +43,11 @@ class Migration(migrations.Migration):
|
|||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('message_type', models.CharField(max_length=128)),
|
||||
('receive_backends', models.JSONField(default=list)),
|
||||
('receive_backends', models.JSONField(default=list, verbose_name='receive backend')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_msg_subscription', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'abstract': False, 'verbose_name': 'User message'
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
|
@ -64,7 +64,7 @@ class Migration(migrations.Migration):
|
|||
('users', models.ManyToManyField(related_name='system_msg_subscriptions', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
'abstract': False, 'verbose_name': 'System message'
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Generated by Django 3.1.12 on 2021-09-09 11:46
|
||||
|
||||
import common.db.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
def init_user_msg_subscription(apps, schema_editor):
|
||||
|
@ -49,7 +49,7 @@ class Migration(migrations.Migration):
|
|||
migrations.AlterField(
|
||||
model_name='usermsgsubscription',
|
||||
name='user',
|
||||
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user_msg_subscription', to=settings.AUTH_USER_MODEL),
|
||||
field=models.OneToOneField(on_delete=common.db.models.CASCADE_SIGNAL_SKIP, related_name='user_msg_subscription', to=settings.AUTH_USER_MODEL, verbose_name='User'),
|
||||
),
|
||||
migrations.RunPython(init_user_msg_subscription)
|
||||
]
|
||||
|
|
|
@ -1,16 +1,23 @@
|
|||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.db.models import JMSModel
|
||||
from common.db.models import JMSModel, CASCADE_SIGNAL_SKIP
|
||||
|
||||
__all__ = ('SystemMsgSubscription', 'UserMsgSubscription')
|
||||
|
||||
|
||||
class UserMsgSubscription(JMSModel):
|
||||
user = models.OneToOneField('users.User', related_name='user_msg_subscription', on_delete=models.CASCADE)
|
||||
receive_backends = models.JSONField(default=list)
|
||||
user = models.OneToOneField(
|
||||
'users.User', related_name='user_msg_subscription', on_delete=CASCADE_SIGNAL_SKIP,
|
||||
verbose_name=_('User')
|
||||
)
|
||||
receive_backends = models.JSONField(default=list, verbose_name=_('receive backend'))
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('User message')
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.user} subscription: {self.receive_backends}'
|
||||
return _('{} subscription').format(self.user)
|
||||
|
||||
|
||||
class SystemMsgSubscription(JMSModel):
|
||||
|
@ -21,11 +28,19 @@ class SystemMsgSubscription(JMSModel):
|
|||
|
||||
message_type_label = ''
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.message_type}'
|
||||
class Meta:
|
||||
verbose_name = _('System message')
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
def set_message_type_label(self):
|
||||
# 采用手动调用,没设置成 property 的方式
|
||||
# 因为目前只有界面修改时会用到这个属性,避免实例化时占用资源计算
|
||||
from ..notifications import system_msgs
|
||||
msg_label = ''
|
||||
for msg in system_msgs:
|
||||
if msg.get('message_type') == self.message_type:
|
||||
msg_label = msg.get('message_type_label', '')
|
||||
break
|
||||
self.message_type_label = msg_label
|
||||
|
||||
@property
|
||||
def receivers(self):
|
||||
|
@ -47,3 +62,9 @@ class SystemMsgSubscription(JMSModel):
|
|||
receviers.append(recevier)
|
||||
|
||||
return receviers
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.message_type_label}' or f'{self.message_type}'
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
|
|
@ -22,6 +22,10 @@ class SystemMsgSubscriptionSerializer(BulkModelSerializer):
|
|||
'receive_backends': {'required': True}
|
||||
}
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
instance.set_message_type_label()
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class SystemMsgSubscriptionByCategorySerializer(serializers.Serializer):
|
||||
category = serializers.CharField()
|
||||
|
|
|
@ -59,7 +59,7 @@ class Migration(migrations.Migration):
|
|||
('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
|
||||
('interval', models.IntegerField(blank=True, help_text='Units: seconds', null=True, verbose_name='Interval')),
|
||||
('crontab', models.CharField(blank=True, help_text='5 * * * *', max_length=128, null=True, verbose_name='Crontab')),
|
||||
('is_periodic', models.BooleanField(default=False)),
|
||||
('is_periodic', models.BooleanField(default=False, verbose_name='Periodic perform')),
|
||||
('callback', models.CharField(blank=True, max_length=128, null=True, verbose_name='Callback')),
|
||||
('is_deleted', models.BooleanField(default=False)),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
|
|
|
@ -23,7 +23,7 @@ class PeriodTaskModelMixin(models.Model):
|
|||
name = models.CharField(
|
||||
max_length=128, unique=False, verbose_name=_("Name")
|
||||
)
|
||||
is_periodic = models.BooleanField(default=False)
|
||||
is_periodic = models.BooleanField(default=False, verbose_name=_("Periodic perform"))
|
||||
interval = models.IntegerField(
|
||||
default=24, null=True, blank=True,
|
||||
verbose_name=_("Cycle perform"),
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('perms', '0028_auto_20220316_2028'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='applicationpermission',
|
||||
name='type',
|
||||
field=models.CharField(choices=[('mysql', 'MySQL'), ('mariadb', 'MariaDB'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('chrome', 'Chrome'), ('mysql_workbench', 'MySQL Workbench'), ('vmware_client', 'vSphere Client'), ('custom', 'Custom'), ('k8s', 'Kubernetes')], max_length=16, verbose_name='Type'),
|
||||
),
|
||||
]
|
|
@ -6,11 +6,11 @@ from django.utils.translation import ugettext_lazy as _
|
|||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from common.db.models import UnionQuerySet, BitOperationChoice
|
||||
from common.utils import date_expired_default, lazyproperty
|
||||
from orgs.mixins.models import OrgManager
|
||||
|
||||
|
||||
__all__ = [
|
||||
'BasePermission', 'BasePermissionQuerySet', 'Action'
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
# Generated by Django 3.1.13 on 2021-11-19 08:29
|
||||
|
||||
import common.db.models
|
||||
from django.conf import settings
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.contenttypes.models
|
||||
|
@ -84,7 +85,7 @@ class Migration(migrations.Migration):
|
|||
('scope', models.CharField(choices=[('system', 'System'), ('org', 'Organization')], default='system', max_length=128, verbose_name='Scope')),
|
||||
('org', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='role_bindings', to='orgs.organization', verbose_name='Organization')),
|
||||
('role', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='role_bindings', to='rbac.role', verbose_name='Role')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='role_bindings', to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
('user', models.ForeignKey(on_delete=common.db.models.CASCADE_SIGNAL_SKIP, related_name='role_bindings', to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Role binding',
|
||||
|
|
|
@ -12,7 +12,7 @@ class Migration(migrations.Migration):
|
|||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='permission',
|
||||
options={'verbose_name': 'Permission'},
|
||||
options={'verbose_name': 'Permissions'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='role',
|
||||
|
|
|
@ -23,7 +23,7 @@ class Permission(DjangoPermission):
|
|||
""" 权限类 """
|
||||
class Meta:
|
||||
proxy = True
|
||||
verbose_name = _('Permission')
|
||||
verbose_name = _('Permissions')
|
||||
|
||||
@classmethod
|
||||
def to_perms(cls, queryset):
|
||||
|
|
|
@ -5,7 +5,7 @@ from django.conf import settings
|
|||
from django.core.exceptions import ValidationError
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from common.db.models import JMSModel
|
||||
from common.db.models import JMSModel, CASCADE_SIGNAL_SKIP
|
||||
from common.utils import lazyproperty
|
||||
from orgs.utils import current_org, tmp_to_root_org
|
||||
from .role import Role
|
||||
|
@ -38,7 +38,7 @@ class RoleBinding(JMSModel):
|
|||
verbose_name=_('Scope')
|
||||
)
|
||||
user = models.ForeignKey(
|
||||
'users.User', related_name='role_bindings', on_delete=models.CASCADE, verbose_name=_('User')
|
||||
'users.User', related_name='role_bindings', on_delete=CASCADE_SIGNAL_SKIP, verbose_name=_('User')
|
||||
)
|
||||
role = models.ForeignKey(
|
||||
Role, related_name='role_bindings', on_delete=models.CASCADE, verbose_name=_('Role')
|
||||
|
@ -56,7 +56,7 @@ class RoleBinding(JMSModel):
|
|||
]
|
||||
|
||||
def __str__(self):
|
||||
display = '{user} & {role}'.format(user=self.user, role=self.role)
|
||||
display = '{role} -> {user}'.format(user=self.user, role=self.role)
|
||||
if self.org:
|
||||
display += ' | {org}'.format(org=self.org)
|
||||
return display
|
||||
|
|
|
@ -21,8 +21,8 @@ class Migration(migrations.Migration):
|
|||
verbose_name='Name')),
|
||||
('value', models.TextField(verbose_name='Value')),
|
||||
('category',
|
||||
models.CharField(default='default', max_length=128)),
|
||||
('encrypted', models.BooleanField(default=False)),
|
||||
models.CharField(default='default', max_length=128, verbose_name='Category')),
|
||||
('encrypted', models.BooleanField(default=False, verbose_name='Encrypted')),
|
||||
('enabled',
|
||||
models.BooleanField(default=True, verbose_name='Enabled')),
|
||||
('comment', models.TextField(verbose_name='Comment')),
|
||||
|
|
|
@ -32,8 +32,8 @@ class SettingManager(models.Manager):
|
|||
class Setting(models.Model):
|
||||
name = models.CharField(max_length=128, unique=True, verbose_name=_("Name"))
|
||||
value = models.TextField(verbose_name=_("Value"), null=True, blank=True)
|
||||
category = models.CharField(max_length=128, default="default")
|
||||
encrypted = models.BooleanField(default=False)
|
||||
category = models.CharField(max_length=128, default="default", verbose_name=_('Category'))
|
||||
encrypted = models.BooleanField(default=False, verbose_name=_('Encrypted'))
|
||||
enabled = models.BooleanField(verbose_name=_("Enabled"), default=True)
|
||||
comment = models.TextField(verbose_name=_("Comment"))
|
||||
|
||||
|
|
|
@ -7,6 +7,8 @@ __all__ = [
|
|||
|
||||
|
||||
class AuthSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('Basic'))
|
||||
|
||||
AUTH_CAS = serializers.BooleanField(required=False, label=_('CAS Auth'))
|
||||
AUTH_OPENID = serializers.BooleanField(required=False, label=_('OPENID Auth'))
|
||||
AUTH_RADIUS = serializers.BooleanField(required=False, label=_('RADIUS Auth'))
|
||||
|
|
|
@ -7,6 +7,8 @@ __all__ = [
|
|||
|
||||
|
||||
class CASSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('CAS'))
|
||||
|
||||
AUTH_CAS = serializers.BooleanField(required=False, label=_('Enable CAS Auth'))
|
||||
CAS_SERVER_URL = serializers.CharField(required=False, max_length=1024, label=_('Server url'))
|
||||
CAS_ROOT_PROXIED_AS = serializers.CharField(
|
||||
|
|
|
@ -7,6 +7,8 @@ __all__ = ['DingTalkSettingSerializer']
|
|||
|
||||
|
||||
class DingTalkSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('DingTalk'))
|
||||
|
||||
DINGTALK_AGENTID = serializers.CharField(max_length=256, required=True, label='AgentId')
|
||||
DINGTALK_APPKEY = serializers.CharField(max_length=256, required=True, label='AppKey')
|
||||
DINGTALK_APPSECRET = EncryptedField(max_length=256, required=False, label='AppSecret')
|
||||
|
|
|
@ -7,6 +7,8 @@ __all__ = ['FeiShuSettingSerializer']
|
|||
|
||||
|
||||
class FeiShuSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('FeiShu'))
|
||||
|
||||
FEISHU_APP_ID = serializers.CharField(max_length=256, required=True, label='App ID')
|
||||
FEISHU_APP_SECRET = EncryptedField(max_length=256, required=False, label='App Secret')
|
||||
AUTH_FEISHU = serializers.BooleanField(default=False, label=_('Enable FeiShu Auth'))
|
||||
|
|
|
@ -36,6 +36,7 @@ class LDAPUserSerializer(serializers.Serializer):
|
|||
|
||||
class LDAPSettingSerializer(serializers.Serializer):
|
||||
# encrypt_fields 现在使用 write_only 来判断了
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('LDAP'))
|
||||
|
||||
AUTH_LDAP_SERVER_URI = serializers.CharField(
|
||||
required=True, max_length=1024, label=_('LDAP server'),
|
||||
|
|
|
@ -16,6 +16,8 @@ class SettingImageField(serializers.ImageField):
|
|||
|
||||
|
||||
class OAuth2SettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('OAuth2'))
|
||||
|
||||
AUTH_OAUTH2 = serializers.BooleanField(
|
||||
default=False, label=_('Enable OAuth2 Auth')
|
||||
)
|
||||
|
|
|
@ -9,6 +9,7 @@ __all__ = [
|
|||
|
||||
|
||||
class CommonSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('OIDC'))
|
||||
# OpenID 公有配置参数 (version <= 1.5.8 或 version >= 1.5.8)
|
||||
BASE_SITE_URL = serializers.CharField(
|
||||
required=False, allow_null=True, allow_blank=True,
|
||||
|
|
|
@ -10,6 +10,8 @@ __all__ = ['RadiusSettingSerializer']
|
|||
|
||||
|
||||
class RadiusSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('Radius'))
|
||||
|
||||
AUTH_RADIUS = serializers.BooleanField(required=False, label=_('Enable Radius Auth'))
|
||||
RADIUS_SERVER = serializers.CharField(required=False, allow_blank=True, max_length=1024, label=_('Host'))
|
||||
RADIUS_PORT = serializers.IntegerField(required=False, label=_('Port'))
|
||||
|
|
|
@ -8,6 +8,8 @@ __all__ = [
|
|||
|
||||
|
||||
class SAML2SettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('SAML2'))
|
||||
|
||||
AUTH_SAML2 = serializers.BooleanField(
|
||||
default=False, required=False, label=_('Enable SAML2 Auth')
|
||||
)
|
||||
|
|
|
@ -24,6 +24,8 @@ class SignTmplPairSerializer(serializers.Serializer):
|
|||
|
||||
|
||||
class BaseSMSSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = _('SMS')
|
||||
|
||||
SMS_TEST_PHONE = serializers.CharField(
|
||||
max_length=256, required=False, validators=[PhoneValidator(), ],
|
||||
allow_blank=True, label=_('Test phone')
|
||||
|
@ -38,7 +40,7 @@ class BaseSMSSettingSerializer(serializers.Serializer):
|
|||
class AlibabaSMSSettingSerializer(BaseSMSSettingSerializer):
|
||||
ALIBABA_ACCESS_KEY_ID = serializers.CharField(max_length=256, required=True, label='AccessKeyId')
|
||||
ALIBABA_ACCESS_KEY_SECRET = EncryptedField(
|
||||
max_length=256, required=False, label='AccessKeySecret',
|
||||
max_length=256, required=False, label='access_key_secret',
|
||||
)
|
||||
ALIBABA_VERIFY_SIGN_NAME = serializers.CharField(max_length=256, required=True, label=_('Signature'))
|
||||
ALIBABA_VERIFY_TEMPLATE_CODE = serializers.CharField(max_length=256, required=True, label=_('Template code'))
|
||||
|
|
|
@ -7,6 +7,8 @@ __all__ = [
|
|||
|
||||
|
||||
class SSOSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('SSO'))
|
||||
|
||||
AUTH_SSO = serializers.BooleanField(
|
||||
required=False, label=_('Enable SSO auth'),
|
||||
help_text=_("Other service can using SSO token login to JumpServer without password")
|
||||
|
|
|
@ -7,6 +7,8 @@ __all__ = ['WeComSettingSerializer']
|
|||
|
||||
|
||||
class WeComSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = '%s-%s' % (_('Authentication'), _('WeCom'))
|
||||
|
||||
WECOM_CORPID = serializers.CharField(max_length=256, required=True, label='corpid')
|
||||
WECOM_AGENTID = serializers.CharField(max_length=256, required=True, label='agentid')
|
||||
WECOM_SECRET = EncryptedField(max_length=256, required=False, label='secret')
|
||||
|
|
|
@ -24,6 +24,8 @@ class AnnouncementSerializer(serializers.Serializer):
|
|||
|
||||
|
||||
class BasicSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = _('Basic')
|
||||
|
||||
SITE_URL = serializers.URLField(
|
||||
required=True, label=_("Site url"),
|
||||
help_text=_('eg: http://dev.jumpserver.org:8080')
|
||||
|
|
|
@ -5,6 +5,8 @@ __all__ = ['CleaningSerializer']
|
|||
|
||||
|
||||
class CleaningSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = _('Period clean')
|
||||
|
||||
LOGIN_LOG_KEEP_DAYS = serializers.IntegerField(
|
||||
min_value=1, max_value=9999,
|
||||
label=_("Login log keep days"), help_text=_("Unit: day")
|
||||
|
|
|
@ -16,6 +16,7 @@ class MailTestSerializer(serializers.Serializer):
|
|||
|
||||
class EmailSettingSerializer(serializers.Serializer):
|
||||
# encrypt_fields 现在使用 write_only 来判断了
|
||||
PREFIX_TITLE = _('Email')
|
||||
|
||||
EMAIL_HOST = serializers.CharField(max_length=1024, required=True, label=_("SMTP host"))
|
||||
EMAIL_PORT = serializers.CharField(max_length=5, required=True, label=_("SMTP port"))
|
||||
|
@ -46,6 +47,8 @@ class EmailSettingSerializer(serializers.Serializer):
|
|||
|
||||
|
||||
class EmailContentSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = _('Email')
|
||||
|
||||
EMAIL_CUSTOM_USER_CREATED_SUBJECT = serializers.CharField(
|
||||
max_length=1024, allow_blank=True, required=False,
|
||||
label=_('Create user email subject'),
|
||||
|
|
|
@ -3,6 +3,8 @@ from rest_framework import serializers
|
|||
|
||||
|
||||
class OtherSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = _('More...')
|
||||
|
||||
EMAIL_SUFFIX = serializers.CharField(
|
||||
required=False, max_length=1024, label=_("Email suffix"),
|
||||
help_text=_('This is used by default if no email is returned during SSO authentication')
|
||||
|
|
|
@ -40,6 +40,8 @@ class PrivateSettingSerializer(PublicSettingSerializer):
|
|||
TERMINAL_KOKO_SSH_ENABLED = serializers.BooleanField()
|
||||
TERMINAL_OMNIDB_ENABLED = serializers.BooleanField()
|
||||
|
||||
TERMINAL_GRAPHICAL_RESOLUTION = serializers.CharField()
|
||||
|
||||
ANNOUNCEMENT_ENABLED = serializers.BooleanField()
|
||||
ANNOUNCEMENT = serializers.DictField()
|
||||
|
||||
|
|
|
@ -143,6 +143,8 @@ class SecurityAuthSerializer(serializers.Serializer):
|
|||
|
||||
|
||||
class SecuritySettingSerializer(SecurityPasswordRuleSerializer, SecurityAuthSerializer):
|
||||
PREFIX_TITLE = _('Security')
|
||||
|
||||
SECURITY_SERVICE_ACCOUNT_REGISTRATION = serializers.BooleanField(
|
||||
required=True, label=_('Enable terminal register'),
|
||||
help_text=_(
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
# coding: utf-8
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from .basic import BasicSettingSerializer
|
||||
from .other import OtherSettingSerializer
|
||||
|
@ -7,7 +9,8 @@ from .auth import (
|
|||
LDAPSettingSerializer, OIDCSettingSerializer, KeycloakSettingSerializer,
|
||||
CASSettingSerializer, RadiusSettingSerializer, FeiShuSettingSerializer,
|
||||
WeComSettingSerializer, DingTalkSettingSerializer, AlibabaSMSSettingSerializer,
|
||||
TencentSMSSettingSerializer, CMPP2SMSSettingSerializer
|
||||
TencentSMSSettingSerializer, CMPP2SMSSettingSerializer, AuthSettingSerializer,
|
||||
SAML2SettingSerializer, OAuth2SettingSerializer, SSOSettingSerializer
|
||||
)
|
||||
from .terminal import TerminalSettingSerializer
|
||||
from .security import SecuritySettingSerializer
|
||||
|
@ -22,6 +25,7 @@ __all__ = [
|
|||
class SettingsSerializer(
|
||||
BasicSettingSerializer,
|
||||
LDAPSettingSerializer,
|
||||
AuthSettingSerializer,
|
||||
TerminalSettingSerializer,
|
||||
SecuritySettingSerializer,
|
||||
WeComSettingSerializer,
|
||||
|
@ -31,13 +35,33 @@ class SettingsSerializer(
|
|||
EmailContentSettingSerializer,
|
||||
OtherSettingSerializer,
|
||||
OIDCSettingSerializer,
|
||||
SAML2SettingSerializer,
|
||||
OAuth2SettingSerializer,
|
||||
KeycloakSettingSerializer,
|
||||
CASSettingSerializer,
|
||||
RadiusSettingSerializer,
|
||||
SSOSettingSerializer,
|
||||
CleaningSerializer,
|
||||
AlibabaSMSSettingSerializer,
|
||||
TencentSMSSettingSerializer,
|
||||
CMPP2SMSSettingSerializer,
|
||||
):
|
||||
CACHE_KEY = 'SETTING_FIELDS_MAPPING'
|
||||
|
||||
# encrypt_fields 现在使用 write_only 来判断了
|
||||
pass
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.fields_label_mapping = None
|
||||
|
||||
# 单次计算量不大,搞个缓存,以防操作日志大量写入时,这里影响性能
|
||||
def get_field_label(self, field_name):
|
||||
if self.fields_label_mapping is None:
|
||||
self.fields_label_mapping = {}
|
||||
for subclass in SettingsSerializer.__bases__:
|
||||
prefix = getattr(subclass, 'PREFIX_TITLE', _('Setting'))
|
||||
fields = subclass().get_fields()
|
||||
for name, item in fields.items():
|
||||
label = '[%s] %s' % (prefix, getattr(item, 'label', ''))
|
||||
self.fields_label_mapping[name] = label
|
||||
cache.set(self.CACHE_KEY, self.fields_label_mapping, 3600 * 24)
|
||||
return self.fields_label_mapping.get(field_name)
|
||||
|
|
|
@ -3,6 +3,8 @@ from rest_framework import serializers
|
|||
|
||||
|
||||
class TerminalSettingSerializer(serializers.Serializer):
|
||||
PREFIX_TITLE = _('Terminal')
|
||||
|
||||
SORT_BY_CHOICES = (
|
||||
('hostname', _('Hostname')),
|
||||
('ip', _('IP'))
|
||||
|
@ -36,3 +38,16 @@ class TerminalSettingSerializer(serializers.Serializer):
|
|||
TERMINAL_MAGNUS_ENABLED = serializers.BooleanField(label=_("Enable database proxy"))
|
||||
TERMINAL_RAZOR_ENABLED = serializers.BooleanField(label=_("Enable Razor"))
|
||||
TERMINAL_KOKO_SSH_ENABLED = serializers.BooleanField(label=_("Enable SSH Client"))
|
||||
|
||||
RESOLUTION_CHOICES = (
|
||||
('Auto', 'Auto'),
|
||||
('1024x768', '1024x768'),
|
||||
('1366x768', '1366x768'),
|
||||
('1600x900', '1600x900'),
|
||||
('1920x1080', '1920x1080')
|
||||
)
|
||||
TERMINAL_GRAPHICAL_RESOLUTION = serializers.ChoiceField(
|
||||
default='Auto', choices=RESOLUTION_CHOICES, required=False,
|
||||
label=_('Default graphics resolution'),
|
||||
help_text=_('Tip: Default resolution to use when connecting graphical assets in Luna pages')
|
||||
)
|
||||
|
|
|
@ -16,17 +16,17 @@
|
|||
{% for backend in mfa_backends %}
|
||||
<div id="mfa-{{ backend.name }}" class="mfa-field
|
||||
{% if backend.challenge_required %}challenge-required{% endif %}"
|
||||
style="display: none"
|
||||
style="display: none"
|
||||
>
|
||||
<input type="text" class="form-control input-style"
|
||||
placeholder="{{ backend.placeholder }}"
|
||||
>
|
||||
{% if backend.challenge_required %}
|
||||
<button class="btn btn-primary full-width btn-challenge"
|
||||
type='button' onclick="sendChallengeCode(this)"
|
||||
>
|
||||
{% trans 'Send verification code' %}
|
||||
</button>
|
||||
<button class="btn btn-primary full-width btn-challenge"
|
||||
type='button' onclick="sendChallengeCode(this)"
|
||||
>
|
||||
{% trans 'Send verification code' %}
|
||||
</button>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endfor %}
|
||||
|
@ -39,12 +39,12 @@
|
|||
}
|
||||
|
||||
.challenge-required .input-style {
|
||||
width: calc(100% - 114px);
|
||||
width: calc(100% - 160px);
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.btn-challenge {
|
||||
width: 110px !important;
|
||||
width: 156px !important;
|
||||
height: 100%;
|
||||
vertical-align: top;
|
||||
}
|
||||
|
@ -74,7 +74,7 @@
|
|||
localStorage.setItem(preferMFAKey, name)
|
||||
}
|
||||
|
||||
$('.input-style').each(function (i, ele){
|
||||
$('.input-style').each(function (i, ele) {
|
||||
$(ele).attr('name', 'code-test')
|
||||
})
|
||||
|
||||
|
@ -112,7 +112,7 @@
|
|||
clearInterval(interval)
|
||||
}
|
||||
}, 1000)
|
||||
setTimeout(function (){
|
||||
setTimeout(function () {
|
||||
toastr.success("{% trans 'The verification code has been sent' %}");
|
||||
})
|
||||
}
|
||||
|
|
|
@ -37,15 +37,21 @@ p {
|
|||
</ul>
|
||||
</div>
|
||||
|
||||
{% if XPACK_ENABLED %}
|
||||
|
||||
<div class="group">
|
||||
<h2>{% trans 'Windows Remote application publisher tools' %} v2.0</h2>
|
||||
<p>{% trans 'Jmservisor is the program used to pull up remote applications in Windows Remote Application publisher' %}</p>
|
||||
<ul>
|
||||
<li><a href="/download/Jmservisor.msi">jmservisor.msi</a></li>
|
||||
</ul>
|
||||
<h2>{% trans 'Windows Remote application publisher tools' %}</h2>
|
||||
<p>{% trans 'OpenSSH is a program used to connect remote applications in the Windows Remote Application Publisher' %}</p>
|
||||
<ul>
|
||||
<li><a href="/download/OpenSSH-Win64.msi">openSSH-win64.msi</a></li>
|
||||
</ul>
|
||||
{% if XPACK_ENABLED %}
|
||||
<p>{% trans 'Jmservisor is the program used to pull up remote applications in Windows Remote Application publisher' %}</p>
|
||||
<ul>
|
||||
<li><a href="/download/Jmservisor.msi">jmservisor.msi</a></li>
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
|
||||
<div class="group">
|
||||
<h2>JumpServer {% trans 'Offline video player' %} v0.1.5</h2>
|
||||
|
|
|
@ -1,109 +1,18 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import pytz
|
||||
import inspect
|
||||
|
||||
from datetime import datetime
|
||||
from functools import reduce, partial
|
||||
from itertools import groupby
|
||||
from uuid import UUID
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.db.models import QuerySet as DJQuerySet
|
||||
from elasticsearch import Elasticsearch
|
||||
from elasticsearch.helpers import bulk
|
||||
from elasticsearch.exceptions import RequestError, NotFoundError
|
||||
|
||||
from common.utils.common import lazyproperty
|
||||
from common.utils import get_logger
|
||||
from common.utils.timezone import local_now_date_display, utc_now
|
||||
from common.exceptions import JMSException
|
||||
from terminal.models import Command
|
||||
from common.plugins.es import ES
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class InvalidElasticsearch(JMSException):
|
||||
default_code = 'invalid_elasticsearch'
|
||||
default_detail = _('Invalid elasticsearch config')
|
||||
|
||||
|
||||
class NotSupportElasticsearch8(JMSException):
|
||||
default_code = 'not_support_elasticsearch8'
|
||||
default_detail = _('Not Support Elasticsearch8')
|
||||
|
||||
|
||||
class CommandStore(object):
|
||||
class CommandStore(ES):
|
||||
def __init__(self, config):
|
||||
self.doc_type = config.get("DOC_TYPE") or '_doc'
|
||||
self.index_prefix = config.get('INDEX') or 'jumpserver'
|
||||
self.is_index_by_date = bool(config.get('INDEX_BY_DATE'))
|
||||
self.exact_fields = {}
|
||||
self.match_fields = {}
|
||||
hosts = config.get("HOSTS")
|
||||
kwargs = config.get("OTHER", {})
|
||||
|
||||
ignore_verify_certs = kwargs.pop('IGNORE_VERIFY_CERTS', False)
|
||||
if ignore_verify_certs:
|
||||
kwargs['verify_certs'] = None
|
||||
self.es = Elasticsearch(hosts=hosts, max_retries=0, **kwargs)
|
||||
|
||||
self.exact_fields = set()
|
||||
self.match_fields = {'input', 'risk_level', 'user', 'asset', 'system_user'}
|
||||
may_exact_fields = {'session', 'org_id'}
|
||||
|
||||
if self.is_new_index_type():
|
||||
self.exact_fields.update(may_exact_fields)
|
||||
self.doc_type = '_doc'
|
||||
else:
|
||||
self.match_fields.update(may_exact_fields)
|
||||
|
||||
self.init_index(config)
|
||||
|
||||
def init_index(self, config):
|
||||
if self.is_index_by_date:
|
||||
date = local_now_date_display()
|
||||
self.index = '%s-%s' % (self.index_prefix, date)
|
||||
self.query_index = '%s-alias' % self.index_prefix
|
||||
else:
|
||||
self.index = config.get("INDEX") or 'jumpserver'
|
||||
self.query_index = config.get("INDEX") or 'jumpserver'
|
||||
|
||||
def is_new_index_type(self):
|
||||
if not self.ping(timeout=3):
|
||||
return False
|
||||
|
||||
info = self.es.info()
|
||||
version = info['version']['number'].split('.')[0]
|
||||
|
||||
if version == '8':
|
||||
raise NotSupportElasticsearch8
|
||||
|
||||
try:
|
||||
# 获取索引信息,如果没有定义,直接返回
|
||||
data = self.es.indices.get_mapping(self.index)
|
||||
except NotFoundError:
|
||||
return False
|
||||
|
||||
try:
|
||||
if version == '6':
|
||||
# 检测索引是不是新的类型 es6
|
||||
properties = data[self.index]['mappings']['data']['properties']
|
||||
else:
|
||||
# 检测索引是不是新的类型 es7 default index type: _doc
|
||||
properties = data[self.index]['mappings']['properties']
|
||||
if properties['session']['type'] == 'keyword' \
|
||||
and properties['org_id']['type'] == 'keyword':
|
||||
return True
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
def pre_use_check(self):
|
||||
if not self.ping(timeout=3):
|
||||
raise InvalidElasticsearch
|
||||
self._ensure_index_exists()
|
||||
|
||||
def _ensure_index_exists(self):
|
||||
properties = {
|
||||
"session": {
|
||||
"type": "keyword"
|
||||
|
@ -118,25 +27,11 @@ class CommandStore(object):
|
|||
"type": "long"
|
||||
}
|
||||
}
|
||||
info = self.es.info()
|
||||
version = info['version']['number'].split('.')[0]
|
||||
if version == '6':
|
||||
mappings = {'mappings': {'data': {'properties': properties}}}
|
||||
else:
|
||||
mappings = {'mappings': {'properties': properties}}
|
||||
exact_fields = {}
|
||||
match_fields = {'input', 'risk_level', 'user', 'asset', 'system_user'}
|
||||
keyword_fields = {'session', 'org_id'}
|
||||
|
||||
if self.is_index_by_date:
|
||||
mappings['aliases'] = {
|
||||
self.query_index: {}
|
||||
}
|
||||
try:
|
||||
self.es.indices.create(self.index, body=mappings)
|
||||
return
|
||||
except RequestError as e:
|
||||
if e.error == 'resource_already_exists_exception':
|
||||
logger.warning(e)
|
||||
else:
|
||||
logger.exception(e)
|
||||
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
|
||||
|
||||
@staticmethod
|
||||
def make_data(command):
|
||||
|
@ -150,274 +45,14 @@ class CommandStore(object):
|
|||
data["date"] = datetime.fromtimestamp(command['timestamp'], tz=pytz.UTC)
|
||||
return data
|
||||
|
||||
def bulk_save(self, command_set, raise_on_error=True):
|
||||
actions = []
|
||||
for command in command_set:
|
||||
data = dict(
|
||||
_index=self.index,
|
||||
_type=self.doc_type,
|
||||
_source=self.make_data(command),
|
||||
)
|
||||
actions.append(data)
|
||||
return bulk(self.es, actions, index=self.index, raise_on_error=raise_on_error)
|
||||
|
||||
def save(self, command):
|
||||
"""
|
||||
保存命令到数据库
|
||||
"""
|
||||
data = self.make_data(command)
|
||||
return self.es.index(index=self.index, doc_type=self.doc_type, body=data)
|
||||
|
||||
def filter(self, query: dict, from_=None, size=None, sort=None):
|
||||
try:
|
||||
data = self._filter(query, from_, size, sort)
|
||||
except Exception as e:
|
||||
logger.error('ES filter error: {}'.format(e))
|
||||
data = []
|
||||
return data
|
||||
|
||||
def _filter(self, query: dict, from_=None, size=None, sort=None):
|
||||
body = self.get_query_body(**query)
|
||||
|
||||
data = self.es.search(
|
||||
index=self.query_index, doc_type=self.doc_type, body=body, from_=from_, size=size,
|
||||
sort=sort
|
||||
)
|
||||
source_data = []
|
||||
for item in data['hits']['hits']:
|
||||
if item:
|
||||
item['_source'].update({'id': item['_id']})
|
||||
source_data.append(item['_source'])
|
||||
|
||||
return Command.from_multi_dict(source_data)
|
||||
|
||||
def count(self, **query):
|
||||
try:
|
||||
body = self.get_query_body(**query)
|
||||
data = self.es.count(index=self.query_index, doc_type=self.doc_type, body=body)
|
||||
count = data["count"]
|
||||
except Exception as e:
|
||||
logger.error('ES count error: {}'.format(e))
|
||||
count = 0
|
||||
return count
|
||||
|
||||
def __getattr__(self, item):
|
||||
return getattr(self.es, item)
|
||||
|
||||
def all(self):
|
||||
"""返回所有数据"""
|
||||
raise NotImplementedError("Not support")
|
||||
|
||||
def ping(self, timeout=None):
|
||||
try:
|
||||
return self.es.ping(request_timeout=timeout)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_query_body(self, **kwargs):
|
||||
new_kwargs = {}
|
||||
for k, v in kwargs.items():
|
||||
new_kwargs[k] = str(v) if isinstance(v, UUID) else v
|
||||
kwargs = new_kwargs
|
||||
|
||||
index_in_field = 'id__in'
|
||||
exact_fields = self.exact_fields
|
||||
match_fields = self.match_fields
|
||||
|
||||
match = {}
|
||||
exact = {}
|
||||
index = {}
|
||||
|
||||
if index_in_field in kwargs:
|
||||
index['values'] = kwargs[index_in_field]
|
||||
|
||||
for k, v in kwargs.items():
|
||||
if k in exact_fields:
|
||||
exact[k] = v
|
||||
elif k in match_fields:
|
||||
match[k] = v
|
||||
|
||||
# 处理时间
|
||||
timestamp__gte = kwargs.get('timestamp__gte')
|
||||
timestamp__lte = kwargs.get('timestamp__lte')
|
||||
@staticmethod
|
||||
def handler_time_field(data):
|
||||
timestamp__gte = data.get('timestamp__gte')
|
||||
timestamp__lte = data.get('timestamp__lte')
|
||||
timestamp_range = {}
|
||||
|
||||
if timestamp__gte:
|
||||
timestamp_range['gte'] = timestamp__gte
|
||||
if timestamp__lte:
|
||||
timestamp_range['lte'] = timestamp__lte
|
||||
|
||||
# 处理组织
|
||||
should = []
|
||||
org_id = match.get('org_id')
|
||||
|
||||
real_default_org_id = '00000000-0000-0000-0000-000000000002'
|
||||
root_org_id = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
if org_id == root_org_id:
|
||||
match.pop('org_id')
|
||||
elif org_id in (real_default_org_id, ''):
|
||||
match.pop('org_id')
|
||||
should.append({
|
||||
'bool': {
|
||||
'must_not': [
|
||||
{
|
||||
'wildcard': {'org_id': '*'}
|
||||
}
|
||||
]}
|
||||
})
|
||||
should.append({'match': {'org_id': real_default_org_id}})
|
||||
|
||||
# 构建 body
|
||||
body = {
|
||||
'query': {
|
||||
'bool': {
|
||||
'must': [
|
||||
{'match': {k: v}} for k, v in match.items()
|
||||
],
|
||||
'should': should,
|
||||
'filter': [
|
||||
{
|
||||
'term': {k: v}
|
||||
} for k, v in exact.items()
|
||||
] + [
|
||||
{
|
||||
'range': {
|
||||
'timestamp': timestamp_range
|
||||
}
|
||||
}
|
||||
] + [
|
||||
{
|
||||
'ids': {k: v}
|
||||
} for k, v in index.items()
|
||||
]
|
||||
}
|
||||
},
|
||||
}
|
||||
return body
|
||||
|
||||
|
||||
class QuerySet(DJQuerySet):
|
||||
_method_calls = None
|
||||
_storage = None
|
||||
_command_store_config = None
|
||||
_slice = None # (from_, size)
|
||||
default_days_ago = 5
|
||||
max_result_window = 10000
|
||||
|
||||
def __init__(self, command_store_config):
|
||||
self._method_calls = []
|
||||
self._command_store_config = command_store_config
|
||||
self._storage = CommandStore(command_store_config)
|
||||
|
||||
# 命令列表模糊搜索时报错
|
||||
super().__init__()
|
||||
|
||||
@lazyproperty
|
||||
def _grouped_method_calls(self):
|
||||
_method_calls = {k: list(v) for k, v in groupby(self._method_calls, lambda x: x[0])}
|
||||
return _method_calls
|
||||
|
||||
@lazyproperty
|
||||
def _filter_kwargs(self):
|
||||
_method_calls = self._grouped_method_calls
|
||||
filter_calls = _method_calls.get('filter')
|
||||
if not filter_calls:
|
||||
return {}
|
||||
names, multi_args, multi_kwargs = zip(*filter_calls)
|
||||
kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})
|
||||
|
||||
striped_kwargs = {}
|
||||
for k, v in kwargs.items():
|
||||
k = k.replace('__exact', '')
|
||||
k = k.replace('__startswith', '')
|
||||
k = k.replace('__icontains', '')
|
||||
striped_kwargs[k] = v
|
||||
return striped_kwargs
|
||||
|
||||
@lazyproperty
|
||||
def _sort(self):
|
||||
order_by = self._grouped_method_calls.get('order_by')
|
||||
if order_by:
|
||||
for call in reversed(order_by):
|
||||
fields = call[1]
|
||||
if fields:
|
||||
field = fields[-1]
|
||||
|
||||
if field.startswith('-'):
|
||||
direction = 'desc'
|
||||
else:
|
||||
direction = 'asc'
|
||||
field = field.lstrip('-+')
|
||||
sort = f'{field}:{direction}'
|
||||
return sort
|
||||
|
||||
def __execute(self):
|
||||
_filter_kwargs = self._filter_kwargs
|
||||
_sort = self._sort
|
||||
from_, size = self._slice or (None, None)
|
||||
data = self._storage.filter(_filter_kwargs, from_=from_, size=size, sort=_sort)
|
||||
return data
|
||||
|
||||
def __stage_method_call(self, item, *args, **kwargs):
|
||||
_clone = self.__clone()
|
||||
_clone._method_calls.append((item, args, kwargs))
|
||||
return _clone
|
||||
|
||||
def __clone(self):
|
||||
uqs = QuerySet(self._command_store_config)
|
||||
uqs._method_calls = self._method_calls.copy()
|
||||
uqs._slice = self._slice
|
||||
uqs.model = self.model
|
||||
return uqs
|
||||
|
||||
def count(self, limit_to_max_result_window=True):
|
||||
filter_kwargs = self._filter_kwargs
|
||||
count = self._storage.count(**filter_kwargs)
|
||||
if limit_to_max_result_window:
|
||||
count = min(count, self.max_result_window)
|
||||
return count
|
||||
|
||||
def __getattribute__(self, item):
|
||||
if any((
|
||||
item.startswith('__'),
|
||||
item in QuerySet.__dict__,
|
||||
)):
|
||||
return object.__getattribute__(self, item)
|
||||
|
||||
origin_attr = object.__getattribute__(self, item)
|
||||
if not inspect.ismethod(origin_attr):
|
||||
return origin_attr
|
||||
|
||||
attr = partial(self.__stage_method_call, item)
|
||||
return attr
|
||||
|
||||
def __getitem__(self, item):
|
||||
max_window = self.max_result_window
|
||||
if isinstance(item, slice):
|
||||
if self._slice is None:
|
||||
clone = self.__clone()
|
||||
from_ = item.start or 0
|
||||
if item.stop is None:
|
||||
size = self.max_result_window - from_
|
||||
else:
|
||||
size = item.stop - from_
|
||||
|
||||
if from_ + size > max_window:
|
||||
if from_ >= max_window:
|
||||
from_ = max_window
|
||||
size = 0
|
||||
else:
|
||||
size = max_window - from_
|
||||
clone._slice = (from_, size)
|
||||
return clone
|
||||
return self.__execute()[item]
|
||||
|
||||
def __repr__(self):
|
||||
return self.__execute().__repr__()
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__execute())
|
||||
|
||||
def __len__(self):
|
||||
return self.count()
|
||||
return 'timestamp', timestamp_range
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('terminal', '0053_auto_20221009_1755'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='session',
|
||||
name='protocol',
|
||||
field=models.CharField(choices=[('ssh', 'ssh'), ('rdp', 'rdp'), ('vnc', 'vnc'), ('telnet', 'telnet'), ('mysql', 'mysql'), ('oracle', 'oracle'), ('mariadb', 'mariadb'), ('sqlserver', 'sqlserver'), ('postgresql', 'postgresql'), ('redis', 'redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('k8s', 'kubernetes')], db_index=True, default='ssh', max_length=16),
|
||||
),
|
||||
]
|
|
@ -38,6 +38,7 @@ class Session(OrgModelMixin):
|
|||
POSTGRESQL = 'postgresql', 'postgresql'
|
||||
REDIS = 'redis', 'redis'
|
||||
MONGODB = 'mongodb', 'MongoDB'
|
||||
CLICKHOUSE = 'clickhouse', 'ClickHouse'
|
||||
K8S = 'k8s', 'kubernetes'
|
||||
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
|
@ -150,7 +151,7 @@ class Session(OrgModelMixin):
|
|||
def db_protocols(self):
|
||||
_PROTOCOL = self.PROTOCOL
|
||||
return [_PROTOCOL.MYSQL, _PROTOCOL.MARIADB, _PROTOCOL.ORACLE,
|
||||
_PROTOCOL.POSTGRESQL, _PROTOCOL.SQLSERVER,
|
||||
_PROTOCOL.POSTGRESQL, _PROTOCOL.SQLSERVER, _PROTOCOL.CLICKHOUSE,
|
||||
_PROTOCOL.REDIS, _PROTOCOL.MONGODB]
|
||||
|
||||
@property
|
||||
|
|
|
@ -10,6 +10,7 @@ from django.db import models
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
from common.mixins import CommonModelMixin
|
||||
from common.plugins.es import QuerySet as ESQuerySet
|
||||
from common.utils import get_logger
|
||||
from common.db.fields import EncryptJsonDictTextField
|
||||
from common.utils.timezone import local_now_date_display
|
||||
|
@ -117,7 +118,8 @@ class CommandStorage(CommonStorageModelMixin, CommonModelMixin):
|
|||
|
||||
if self.type in TYPE_ENGINE_MAPPING:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING[self.type])
|
||||
qs = engine_mod.QuerySet(self.config)
|
||||
store = engine_mod.CommandStore(self.config)
|
||||
qs = ESQuerySet(store)
|
||||
qs.model = Command
|
||||
return qs
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
from django.conf import settings
|
||||
|
||||
from common.utils import get_logger
|
||||
from common.const.signals import SKIP_SIGNAL
|
||||
from users.models import User
|
||||
from orgs.utils import tmp_to_root_org
|
||||
from .status import Status
|
||||
|
@ -107,8 +108,8 @@ class Terminal(StorageMixin, TerminalStatusMixin, models.Model):
|
|||
http_port = models.IntegerField(verbose_name=_('HTTP Port'), default=5000)
|
||||
command_storage = models.CharField(max_length=128, verbose_name=_("Command storage"), default='default')
|
||||
replay_storage = models.CharField(max_length=128, verbose_name=_("Replay storage"), default='default')
|
||||
user = models.OneToOneField(User, related_name='terminal', verbose_name='Application User', null=True, on_delete=models.CASCADE)
|
||||
is_accepted = models.BooleanField(default=False, verbose_name='Is Accepted')
|
||||
user = models.OneToOneField(User, related_name='terminal', verbose_name=_('Application User'), null=True, on_delete=models.CASCADE)
|
||||
is_accepted = models.BooleanField(default=False, verbose_name=_('Is Accepted'))
|
||||
is_deleted = models.BooleanField(default=False)
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
comment = models.TextField(blank=True, verbose_name=_('Comment'))
|
||||
|
@ -159,6 +160,7 @@ class Terminal(StorageMixin, TerminalStatusMixin, models.Model):
|
|||
|
||||
def delete(self, using=None, keep_parents=False):
|
||||
if self.user:
|
||||
setattr(self.user, SKIP_SIGNAL, True)
|
||||
self.user.delete()
|
||||
self.user = None
|
||||
self.is_deleted = True
|
||||
|
|
|
@ -70,6 +70,9 @@ class CommandAlertMessage(CommandAlertMixin, SystemMessage):
|
|||
def __init__(self, command):
|
||||
self.command = command
|
||||
|
||||
def __str__(self):
|
||||
return str(self.message_type_label)
|
||||
|
||||
@classmethod
|
||||
def gen_test_msg(cls):
|
||||
command = Command.objects.first().to_dict()
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-04 07:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('tickets', '0018_applyapplicationticket_apply_actions'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='applyapplicationticket',
|
||||
name='apply_type',
|
||||
field=models.CharField(choices=[('mysql', 'MySQL'), ('mariadb', 'MariaDB'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('chrome', 'Chrome'), ('mysql_workbench', 'MySQL Workbench'), ('vmware_client', 'vSphere Client'), ('custom', 'Custom'), ('k8s', 'Kubernetes')], max_length=16, verbose_name='Type'),
|
||||
),
|
||||
]
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue