feat: Support saving operate logs, user login logs, password change logs, and FTP logs to ES

pull/14153/head
jiangweidong 2024-09-14 15:04:33 +08:00
parent 054d385ffc
commit d1ead59b52
43 changed files with 965 additions and 472 deletions
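This commit routes the four audit log types (operate, user login, password change, FTP) through a pluggable storage layer: call sites stop touching model managers such as UserLoginLog.objects directly and instead resolve a store via get_log_storage. A minimal sketch of the new call pattern, using only names introduced in the diff below (runnable only inside a JumpServer deployment):

from audits.backends import get_log_storage
from audits.const import LogType

storage = get_log_storage(LogType.login_log)   # DB- or ES-backed store, per LogStorage config
storage.save(username='alice', ip='10.0.0.1')  # writes go to whichever backend is active
latest = storage.get_manager().first()         # reads use the same manager-style API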

View File

@ -3,6 +3,8 @@ from django.utils.translation import gettext_lazy as _
from accounts.models import Account
from assets.models import Asset
from audits.backends import get_log_storage
from audits.const import LogType
from audits.models import UserLoginLog
from notifications.notifications import UserMessage
from users.models import User
@ -34,7 +36,8 @@ class UserLoginReminderMsg(UserMessage):
@classmethod
def gen_test_msg(cls):
user = User.objects.first()
user_log = UserLoginLog.objects.first()
storage = get_log_storage(LogType.login_log)
user_log = storage.get_manager().first()
return cls(user, user_log)

View File

@ -1,12 +1,8 @@
# -*- coding: utf-8 -*-
#
from importlib import import_module
from django.conf import settings
from django.db.models import F, Value, CharField, Q
from django.db.models.functions import Cast
from django.http import HttpResponse, FileResponse
from django.utils.encoding import escape_uri_path
from rest_framework import generics
from rest_framework import status
from rest_framework import viewsets
@ -18,9 +14,9 @@ from common.api import CommonApiMixin
from common.const.http import GET, POST
from common.drf.filters import DatetimeRangeFilterBackend
from common.permissions import IsServiceAccount
from common.plugins.es import QuerySet as ESQuerySet
from common.sessions.cache import user_session_manager
from common.storage.ftp_file import FTPFileStorageHandler
from common.storage.backends.ftp_file import FTPFileStorageHandler
from common.storage.mixins import StorageDestroyModelMixin, StorageTestConnectiveMixin
from common.utils import is_uuid, get_logger, lazyproperty
from orgs.mixins.api import OrgReadonlyModelViewSet, OrgModelViewSet
from orgs.models import Organization
@ -28,19 +24,20 @@ from orgs.utils import current_org, tmp_to_root_org
from rbac.permissions import RBACPermission
from terminal.models import default_storage
from users.models import User
from .backends import TYPE_ENGINE_MAPPING
from .const import ActivityChoices
from .filters import UserSessionFilterSet, OperateLogFilterSet
from .models import (
FTPLog, UserLoginLog, OperateLog, PasswordChangeLog,
ActivityLog, JobLog, UserSession
ActivityLog, JobLog, UserSession, LogStorage
)
from .serializers import (
FTPLogSerializer, UserLoginLogSerializer, JobLogSerializer,
OperateLogSerializer, OperateLogActionDetailSerializer,
PasswordChangeLogSerializer, ActivityUnionLogSerializer,
FileSerializer, UserSessionSerializer
FileSerializer, UserSessionSerializer, LogStorageSerializer
)
from .backends import get_log_storage
from .const import LogType
from .utils import construct_userlogin_usernames
logger = get_logger(__name__)
@ -73,6 +70,12 @@ class FTPLogViewSet(OrgModelViewSet):
'download': 'audits.view_ftplog',
}
def get_queryset(self):
return get_log_storage(LogType.ftp_log).get_manager().all()
def get_object(self):
return self.get_queryset().get(id=self.kwargs.get('pk'))
def get_storage(self):
return FTPFileStorageHandler(self.get_object())
@ -89,11 +92,7 @@ class FTPLogViewSet(OrgModelViewSet):
return HttpResponse(url)
file = open(default_storage.path(local_path), 'rb')
response = FileResponse(file)
response['Content-Type'] = 'application/octet-stream'
filename = escape_uri_path(ftp_log.filename)
response["Content-Disposition"] = "attachment; filename*=UTF-8''{}".format(filename)
return response
return FileResponse(file, as_attachment=True, filename=ftp_log.filename)
@action(methods=[POST], detail=True, permission_classes=[IsServiceAccount, ], serializer_class=FileSerializer)
def upload(self, request, *args, **kwargs):
@ -124,6 +123,10 @@ class UserLoginCommonMixin:
filterset_fields = ['id', 'username', 'ip', 'city', 'type', 'status', 'mfa']
search_fields = ['id', 'username', 'ip', 'city']
def get_queryset(self):
storage = get_log_storage(LogType.login_log)
return storage.get_manager().all()
class UserLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
@staticmethod
@ -145,8 +148,9 @@ class MyLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
permission_classes = [IsAuthenticated]
def get_queryset(self):
user = self.request.user
qs = super().get_queryset()
qs = qs.filter(username=self.request.user.username)
qs = qs.filter(username__in=[f"{user.name}({user.username})", user.username])
return qs
@ -218,17 +222,10 @@ class OperateLogViewSet(OrgReadonlyModelViewSet):
return super().get_serializer_class()
def get_queryset(self):
qs = OperateLog.objects.all()
qs = get_log_storage(LogType.operate_log).get_manager().all()
if self.is_action_detail:
with tmp_to_root_org():
qs |= OperateLog.objects.filter(org_id=Organization.SYSTEM_ID)
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
if es_config:
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
store = engine_mod.OperateLogStore(es_config)
if store.ping(timeout=2):
qs = ESQuerySet(store)
qs.model = OperateLog
qs |= qs.filter(org_id=Organization.SYSTEM_ID)
return qs
@ -244,7 +241,7 @@ class PasswordChangeLogViewSet(OrgReadonlyModelViewSet):
ordering = ['-datetime']
def get_queryset(self):
queryset = super().get_queryset()
queryset = get_log_storage(LogType.password_change_log).get_manager()
if not current_org.is_root():
users = current_org.get_members()
queryset = queryset.filter(
@ -290,3 +287,19 @@ class UserSessionViewSet(CommonApiMixin, viewsets.ModelViewSet):
user_session_manager.remove(key)
queryset.delete()
return Response(status=status.HTTP_200_OK)
class LogStorageViewSet(
StorageDestroyModelMixin, CommonApiMixin, viewsets.ModelViewSet
):
search_fields = ('name', 'type')
filterset_fields = ['type']
queryset = LogStorage.objects.all()
serializer_class = LogStorageSerializer
class LogStorageTestConnectiveApi(StorageTestConnectiveMixin, generics.RetrieveAPIView):
queryset = LogStorage.objects.all()
rbac_perms = {
'retrieve': 'audits.view_logstorage'
}

View File

@ -1,18 +1,81 @@
from importlib import import_module
from typing import Any
from django.conf import settings
from audits.const import LogType, LogStorageType as LSType
from common.utils import get_logger
from common.plugins.es import InvalidElasticsearch
from .es import (
OperateLogStore as ESOperateLogStore,
LoginLogStore as ESLoginLogStore,
FTPLogStore as ESFTPLogStore,
PasswordChangeLogStore as ESPwdChangeLogStore,
)
from .db import (
OperateLogStore as DBOperateLogStore,
LoginLogStore as DBLoginLogStore,
FTPLogStore as DBFTPLogStore,
PasswordChangeLogStore as DBPwdChangeLogStore,
)
TYPE_ENGINE_MAPPING = {
'db': 'audits.backends.db',
'es': 'audits.backends.es',
logger = get_logger(__file__)
_global_log_storage: dict[str, Any] = {
LogType.operate_log: None,
LogType.login_log: None,
LogType.ftp_log: None,
LogType.password_change_log: None
}
log_type_mapping = {
LogType.operate_log: {
LSType.server: DBOperateLogStore, LSType.es: ESOperateLogStore
},
LogType.login_log: {
LSType.server: DBLoginLogStore, LSType.es: ESLoginLogStore
},
LogType.ftp_log: {
LSType.server: DBFTPLogStore, LSType.es: ESFTPLogStore
},
LogType.password_change_log: {
LSType.server: DBPwdChangeLogStore, LSType.es: ESPwdChangeLogStore
},
}
def get_operate_log_storage(default=False):
engine_mod = import_module(TYPE_ENGINE_MAPPING['db'])
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
if not default and es_config:
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
storage = engine_mod.OperateLogStore(es_config)
return storage
def refresh_log_storage():
from audits.models import LogStorage
for log_type in _global_log_storage.keys():
_global_log_storage[log_type] = None
for storage in LogStorage.objects.exclude(type=LSType.server):
for log_type in list(storage.meta.get('LOG_TYPES', [])):
try:
log_storage: Any = log_type_mapping[log_type][storage.type](storage.config)
_global_log_storage[log_type] = log_storage
except InvalidElasticsearch:
logger.warning('Invalid Elasticsearch log storage for log type: %s' % log_type)
for log_type, storage in _global_log_storage.items():
if not storage:
server_storage: Any = log_type_mapping[log_type][LSType.server]()
_global_log_storage[log_type] = server_storage
def get_log_storage(log_type, backend=None, backend_kwargs=None):
if backend:
params = backend_kwargs or {}
return log_type_mapping[log_type][backend](**params)
if _global_log_storage[log_type] is not None:
log_storage = _global_log_storage[log_type]
else:
refresh_log_storage()
default = log_type_mapping[log_type][LSType.server]()
log_storage = _global_log_storage.get(log_type, default)
if not log_storage.ping(timeout=3):
logger.warning('Switching to the default log storage. Type: %s' % log_type)
log_storage = log_type_mapping[log_type][LSType.server]()
return log_storage
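get_log_storage resolves a store in three steps: an explicit backend argument wins; otherwise the module-level _global_log_storage cache is consulted (and refreshed from LogStorage rows on a miss); finally, a store that fails ping(timeout=3) is swapped for the server (database) store. A sketch of the entry points defined above:

from audits.backends import get_log_storage, refresh_log_storage
from audits.const import LogType, LogStorageType

store = get_log_storage(LogType.ftp_log)  # cached, ping-checked resolution
db_store = get_log_storage(LogType.ftp_log, backend=LogStorageType.server)  # force the DB backend
refresh_log_storage()  # re-read LogStorage rows after the configuration changes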

View File

@ -1,26 +1,49 @@
# ~*~ coding: utf-8 ~*~
from typing import Any
from django.utils.translation import gettext_lazy as _
from audits.models import OperateLog
from perms.const import ActionChoices
from audits.models import (
OperateLog, UserLoginLog, PasswordChangeLog, FTPLog
)
from audits.const import LogStorageType
from .mixin import OperateStorageMixin
class OperateLogStore(object):
# Use the Unicode unit separator \u001f instead of the old separator \0; PostgreSQL does not support \0
SEP = '\u001f'
OLD_SEP = '\0'
def __init__(self, config):
self.model = OperateLog
self.max_length = 2048
self.max_length_tip_msg = _(
'The text content is too long. Use Elasticsearch to store operation logs'
)
class BaseLogStore(OperateStorageMixin):
model: Any # Log model
type = LogStorageType.server
@staticmethod
def ping(timeout=None):
return True
@staticmethod
def update(instance, validated_data):
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
def save(self, **kwargs):
return self.model.objects.create(**kwargs)
def get_manager(self):
return self.model.objects
class OperateLogStore(BaseLogStore):
# Use the Unicode unit separator \u001f instead of the old separator \0; PostgreSQL does not support \0
SEP = '\u001f'
OLD_SEP = '\0'
model = OperateLog
def __init__(self, *args, **kwargs):
self.max_length = 2048
self.max_length_tip_msg = _(
'The text content is too long. Use Elasticsearch to store operation logs'
)
@classmethod
def split_value(cls, value):
"""
@ -63,14 +86,6 @@ class OperateLogStore(object):
before[k], after[k] = before_value, after_value
return before, after
@staticmethod
def _get_special_handler(resource_type):
# Handle special fields according to the resource type
resource_map = {
'Asset permission': lambda k, v: ActionChoices.display(int(v)) if k == 'Actions' else v
}
return resource_map.get(resource_type, lambda k, v: _(v))
@classmethod
def convert_diff_friendly(cls, op_log):
diff_list = list()
@ -111,3 +126,15 @@ class OperateLogStore(object):
setattr(op_log, 'LOCKING_ORG', op_log.org_id)
op_log.diff = diff
op_log.save()
class LoginLogStore(BaseLogStore):
model = UserLoginLog
class FTPLogStore(BaseLogStore):
model = FTPLog
class PasswordChangeLogStore(BaseLogStore):
model = PasswordChangeLog
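BaseLogStore gives the database stores the same surface as their ES counterparts (ping, save, update, get_manager), keeping callers backend-agnostic. For instance, the password-change signal handler later in this diff boils down to the following sketch (runnable only inside the app):

from audits.backends.db import PasswordChangeLogStore

store = PasswordChangeLogStore()
store.ping()  # always True for the server backend
store.save(user='bob', change_by='admin', remote_addr='::1')  # model.objects.create(...)
recent = store.get_manager().filter(user='bob')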

View File

@ -1,71 +1,115 @@
# -*- coding: utf-8 -*-
#
import json
import uuid
from typing import Any
from django.utils.translation import gettext_lazy as _
from django.db.models.signals import post_save
from audits.const import LogStorageType
from audits.models import (
OperateLog, UserLoginLog, PasswordChangeLog, FTPLog
)
from common.db.encoder import ModelJSONFieldEncoder
from common.utils import get_logger, data_to_json
from common.utils.timezone import local_now_display
from common.utils import get_logger
from common.utils.encode import Singleton
from common.plugins.es import ES
from common.plugins.es import ES, QuerySet as ESQuerySet
from .mixin import OperateStorageMixin
logger = get_logger(__file__)
class OperateLogStore(ES, metaclass=Singleton):
class BaseLogStorage(ES):
model: Any # Log model
type = LogStorageType.es
date_field_name = 'datetime'
_manager = None
def make_data(self, data):
if not data.get('id'):
data['id'] = str(uuid.uuid4())
if not data.get(self.date_field_name):
data[self.date_field_name] = local_now_display()
return json.loads(data_to_json(data, cls=ModelJSONFieldEncoder))
def update(self, instance, validated_data):
other_params = {}
es_id = getattr(instance, 'es_id', '')
if self.version == 7:
other_params = {'doc_type': self.doc_type}
data = {'doc': json.loads(data_to_json(validated_data, cls=ModelJSONFieldEncoder))}
self.es.update(
index=self.index, id=es_id, body=data, refresh=True, **other_params
)
for k, v in validated_data.items():
setattr(instance, k, v)
return instance
def save(self, **kwargs):
super().save(**kwargs)
instance = self.model.from_dict(kwargs)
post_save.send(sender=self.model, instance=instance, created=True)
return instance
def get_manager(self):
if self._manager is None:
self._manager = ESQuerySet(self)
self._manager.model = self.model
return self._manager
class OperateLogStore(OperateStorageMixin, BaseLogStorage):
model = OperateLog
def __init__(self, config):
properties = {
"id": {
"type": "keyword"
},
"user": {
"type": "keyword"
},
"action": {
"type": "keyword"
},
"resource_type": {
"type": "keyword"
},
"org_id": {
"type": "keyword"
},
"datetime": {
"type": "date",
"format": "yyyy-MM-dd HH:mm:ss"
}
'id': {'type': 'keyword'},
'user': {'type': 'keyword'},
'action': {'type': 'keyword'},
'resource': {'type': 'keyword'},
'resource_type': {'type': 'keyword'},
'remote_addr': {'type': 'keyword'},
'org_id': {'type': 'keyword'},
'datetime': {'type': 'date', 'format': 'yyyy-MM-dd HH:mm:ss'}
}
exact_fields = {}
match_fields = {
'id', 'user', 'action', 'resource_type',
'resource', 'remote_addr', 'org_id'
}
keyword_fields = {
'id', 'user', 'action', 'resource_type', 'org_id'
}
if not config.get('INDEX'):
keyword_fields = {k for k, v in properties.items() if v.get('type') == 'keyword'}
exact_fields = keyword_fields | {'datetime'}
if not config.get('INDEX', None):
config['INDEX'] = 'jumpserver_operate_log'
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
self.pre_use_check()
else:
config['INDEX'] = f"{config['INDEX']}_operate_log"
super().__init__(config, properties, keyword_fields, exact_fields=exact_fields)
@staticmethod
def make_data(data):
op_id = data.get('id', str(uuid.uuid4()))
datetime_param = data.get('datetime', local_now_display())
data = {
'id': op_id, 'user': data['user'], 'action': data['action'],
'resource_type': data['resource_type'], 'resource': data['resource'],
'remote_addr': data['remote_addr'], 'datetime': datetime_param,
'before': data['before'], 'after': data['after'], 'org_id': data['org_id']
}
return data
@classmethod
def convert_diff_friendly(cls, op_log):
diff_list = []
handler = cls._get_special_handler(op_log.get('resource_type'))
before = op_log.get('before') or {}
after = op_log.get('after') or {}
keys = set(before.keys()) | set(after.keys())
for key in keys:
before_v, after_v = before.get(key), after.get(key)
diff_list.append({
'field': _(key),
'before': handler(key, before_v) if before_v else _('empty'),
'after': handler(key, after_v) if after_v else _('empty'),
})
return diff_list
def save(self, **kwargs):
log_id = kwargs.get('id', '')
before = kwargs.get('before') or {}
after = kwargs.get('after') or {}
op_log = self.get({'id': log_id})
op_log = None
if log_id:
op_log = self.get({'id': log_id})
if op_log is not None:
other_params = {}
data = {'doc': {}}
raw_after = op_log.get('after') or {}
raw_before = op_log.get('before') or {}
@ -73,13 +117,80 @@ class OperateLogStore(ES, metaclass=Singleton):
raw_after.update(after)
data['doc']['before'] = raw_before
data['doc']['after'] = raw_after
self.es.update(
index=self.index, doc_type=self.doc_type,
id=op_log.get('es_id'), body=data, refresh=True
if self.version == 7:
other_params = {'doc_type': self.doc_type}
return self.es.update(
index=self.index, id=op_log.get('es_id'), body=data,
refresh=True, **other_params
)
else:
data = self.make_data(kwargs)
self.es.index(
index=self.index, doc_type=self.doc_type, body=data,
refresh=True
)
return super().save(**kwargs)
class LoginLogStore(BaseLogStorage):
model = UserLoginLog
def __init__(self, config):
properties = {
'id': {'type': 'keyword'},
'username': {'type': 'keyword'},
'type': {'type': 'keyword'},
'ip': {'type': 'keyword'},
'city': {'type': 'keyword'},
'backend': {'type': 'keyword'},
'org_id': {'type': 'keyword'},
'datetime': {'type': 'date', 'format': 'yyyy-MM-dd HH:mm:ss'}
}
keyword_fields = {k for k, v in properties.items() if v.get('type') == 'keyword'}
exact_fields = keyword_fields | {'datetime'}
if not config.get('INDEX', None):
config['INDEX'] = 'jumpserver_login_log'
else:
config['INDEX'] = f"{config['INDEX']}_login_log"
super().__init__(config, properties, keyword_fields, exact_fields)
class FTPLogStore(BaseLogStorage):
model = FTPLog
date_field_name = 'date_start'
def __init__(self, config):
properties = {
'id': {'type': 'keyword'},
'user': {'type': 'keyword'},
'asset': {'type': 'keyword'},
'account': {'type': 'keyword'},
'remote_addr': {'type': 'keyword'},
'session': {'type': 'keyword'},
'operate': {'type': 'keyword'},
'org_id': {'type': 'keyword'},
'date_start': {'type': 'date', 'format': 'yyyy-MM-dd HH:mm:ss'},
}
keyword_fields = {k for k, v in properties.items() if v.get('type') == 'keyword'}
exact_fields = keyword_fields | {'date_start'}
if not config.get('INDEX', None):
config['INDEX'] = 'jumpserver_ftp_log'
else:
config['INDEX'] = f"{config['INDEX']}_ftp_log"
super().__init__(config, properties, keyword_fields, exact_fields=exact_fields)
class PasswordChangeLogStore(BaseLogStorage):
model = PasswordChangeLog
def __init__(self, config):
properties = {
'id': {'type': 'keyword'},
'user': {'type': 'keyword'},
'change_by': {'type': 'keyword'},
'remote_addr': {'type': 'keyword'},
'org_id': {'type': 'keyword'},
'datetime': {'type': 'date', 'format': 'yyyy-MM-dd HH:mm:ss'}
}
keyword_fields = {'id', 'user', 'change_by', 'remote_addr', 'org_id'}
exact_fields = keyword_fields
if not config.get('INDEX', None):
config['INDEX'] = 'jumpserver_password_change_log'
else:
config['INDEX'] = f"{config['INDEX']}_password_change_log"
super().__init__(config, properties, keyword_fields, exact_fields)
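All four ES stores share one index-naming rule: an unset INDEX yields jumpserver_<log_type>, while a configured value is treated as a prefix. A standalone restatement of that rule:

def resolve_index(config: dict, log_type: str) -> str:
    # Mirrors the INDEX handling in each ES store's __init__ above.
    prefix = config.get('INDEX')
    return f'{prefix}_{log_type}' if prefix else f'jumpserver_{log_type}'

assert resolve_index({}, 'login_log') == 'jumpserver_login_log'
assert resolve_index({'INDEX': 'acme'}, 'ftp_log') == 'acme_ftp_log'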

View File

@ -0,0 +1,11 @@
from perms.const import ActionChoices
class OperateStorageMixin(object):
@staticmethod
def _get_special_handler(resource_type):
# Handle special fields according to the resource type
resource_map = {
'Asset permission': lambda k, v: ActionChoices.display(int(v)) if k == 'Actions' else v
}
return resource_map.get(resource_type, lambda k, v: v)

View File

@ -63,3 +63,15 @@ class MFAChoices(IntegerChoices):
class LoginStatusChoices(IntegerChoices):
success = True, _("Success")
failed = False, _("Failed")
class LogStorageType(TextChoices):
server = 'server', 'Server'
es = 'es', 'Elasticsearch'
class LogType(TextChoices):
operate_log = 'operate_log', _('Operate log')
login_log = 'login_log', _('User login log')
ftp_log = 'ftp_log', _('File transfer log')
password_change_log = 'password_change_log', _('Password change log')

View File

@ -16,7 +16,8 @@ from settings.models import Setting
from settings.serializers import SettingsSerializer
from users.models import Preference
from users.serializers import PreferenceSerializer
from .backends import get_operate_log_storage
from .const import LogType
from .backends import get_log_storage
logger = get_logger(__name__)
@ -24,14 +25,6 @@ logger = get_logger(__name__)
class OperatorLogHandler(metaclass=Singleton):
CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
def __init__(self):
self.log_client = self.get_storage_client()
@staticmethod
def get_storage_client():
client = get_operate_log_storage()
return client
@staticmethod
def _consistent_type_to_str(value1, value2):
if isinstance(value1, datetime):
@ -176,16 +169,11 @@ class OperatorLogHandler(metaclass=Singleton):
'remote_addr': remote_addr, 'before': before, 'after': after,
}
with transaction.atomic():
if self.log_client.ping(timeout=1):
client = self.log_client
else:
logger.info('Switch default operate log storage save.')
client = get_operate_log_storage(default=True)
try:
client = get_log_storage(LogType.operate_log)
client.save(**data)
except Exception as e:
error_msg = 'An error occurred saving OperateLog.' \
error_msg = 'An error occurred saving OperateLog.\n' \
'Error: %s, Data: %s' % (e, data)
logger.error(error_msg)

View File

@ -0,0 +1,47 @@
# Generated by Django 4.1.13 on 2024-09-06 08:40
import common.db.fields
from django.db import migrations, models
import uuid
def init_log_storage(apps, schema_editor):
model = apps.get_model("audits", "LogStorage")
model.objects.update_or_create(
name="default", type="server", is_default=True,
defaults={
"name": "default",
"type": "server",
"comment": "Store locally",
"meta": "{}",
},
)
class Migration(migrations.Migration):
dependencies = [
('audits', '0003_auto_20180816_1652'),
]
operations = [
migrations.CreateModel(
name='LogStorage',
fields=[
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
('meta', common.db.fields.EncryptJsonDictTextField(default={})),
('is_default', models.BooleanField(default=False, verbose_name='Default')),
('type', models.CharField(choices=[('server', 'Server'), ('es', 'Elasticsearch')], default='server', max_length=16, verbose_name='Type')),
],
options={
'abstract': False,
},
),
migrations.RunPython(init_log_storage),
]

View File

@ -1,5 +1,6 @@
import os
import uuid
from datetime import timedelta
from importlib import import_module
@ -11,8 +12,10 @@ from django.utils import timezone
from django.utils.translation import gettext, gettext_lazy as _
from common.db.encoder import ModelJSONFieldEncoder
from common.db.models import JMSBaseModel
from common.sessions.cache import user_session_manager
from common.utils import lazyproperty, i18n_trans
from common.storage.mixins import CommonStorageModelMixin
from common.utils import lazyproperty, i18n_trans, get_logger
from ops.models import JobExecution
from orgs.mixins.models import OrgModelMixin, Organization
from orgs.utils import current_org
@ -24,6 +27,7 @@ from .const import (
LoginTypeChoices,
MFAChoices,
LoginStatusChoices,
LogStorageType
)
__all__ = [
@ -33,11 +37,47 @@ __all__ = [
"PasswordChangeLog",
"UserLoginLog",
"JobLog",
"UserSession"
"UserSession",
"LogStorage",
]
class JobLog(JobExecution):
logger = get_logger(__file__)
class LogMixin(object):
@lazyproperty
def python_table(self):
convert_table = {}
for field in self._meta.fields: # noqa
if hasattr(field, 'to_python') and hasattr(field, 'name'):
convert_table[field.name] = field.to_python
else:
convert_table[field.name] = lambda value: value
return convert_table
@classmethod
def from_dict(cls, d, flat=False):
self, result = cls(), []
for field, value in d.items():
value = value.pk if isinstance(value, models.Model) else value
handler = self.python_table.get(field)
if handler is None:
continue
real_value = handler(value)
if flat:
result.append(real_value)
else:
setattr(self, field, real_value)
return result if flat else self
@classmethod
def from_multi_dict(cls, l, flat=False):
return [cls.from_dict(d, flat) for d in l]
class JobLog(LogMixin, JobExecution):
@property
def creator_name(self):
return self.creator.name
@ -47,7 +87,7 @@ class JobLog(JobExecution):
verbose_name = _("Job audit log")
class FTPLog(OrgModelMixin):
class FTPLog(LogMixin, OrgModelMixin):
upload_to = 'FTP_FILES'
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@ -85,7 +125,7 @@ class FTPLog(OrgModelMixin):
return name, None
class OperateLog(OrgModelMixin):
class OperateLog(LogMixin, OrgModelMixin):
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
user = models.CharField(max_length=128, verbose_name=_("User"))
action = models.CharField(
@ -113,21 +153,6 @@ class OperateLog(OrgModelMixin):
self.org_id = Organization.ROOT_ID
return super(OperateLog, self).save(*args, **kwargs)
@classmethod
def from_dict(cls, d):
self = cls()
for k, v in d.items():
setattr(self, k, v)
return self
@classmethod
def from_multi_dict(cls, l):
operate_logs = []
for d in l:
operate_log = cls.from_dict(d)
operate_logs.append(operate_log)
return operate_logs
class Meta:
verbose_name = _("Operate log")
ordering = ('-datetime',)
@ -167,7 +192,7 @@ class ActivityLog(OrgModelMixin):
return super(ActivityLog, self).save(*args, **kwargs)
class PasswordChangeLog(models.Model):
class PasswordChangeLog(LogMixin, models.Model):
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
user = models.CharField(max_length=128, verbose_name=_("User"))
change_by = models.CharField(max_length=128, verbose_name=_("Change by"))
@ -183,7 +208,7 @@ class PasswordChangeLog(models.Model):
verbose_name = _("Password change log")
class UserLoginLog(models.Model):
class UserLoginLog(LogMixin, models.Model):
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
username = models.CharField(max_length=128, verbose_name=_("Username"))
type = models.CharField(
@ -256,7 +281,7 @@ class UserLoginLog(models.Model):
verbose_name = _("User login log")
class UserSession(models.Model):
class UserSession(LogMixin, models.Model):
_OPERATE_LOG_ACTION = {'delete': ActionChoices.finished}
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@ -301,3 +326,34 @@ class UserSession(models.Model):
permissions = [
('offline_usersession', _('Offline user session')),
]
class LogStorage(CommonStorageModelMixin, JMSBaseModel):
type = models.CharField(
max_length=16, choices=LogStorageType.choices,
default=LogStorageType.es.value, verbose_name=_("Type"),
)
@property
def type_null_or_server(self):
return self.type == LogStorageType.server
def is_valid(self):
if self.type == LogStorageType.server:
return True
from .backends import get_log_storage
for log_type in self.meta.get('LOG_TYPES', []):
log_store = get_log_storage(
log_type, self.type, {'config': self.config}
)
log_store.pre_use_check()
return True
@staticmethod
def is_use():
return False
def save(self, *args, **kwargs):
self.is_valid()
super().save(*args, **kwargs)
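Registering an external log storage is a single LogStorage row whose meta carries the backend config plus the log types it should own; save() calls is_valid(), which instantiates the ES store for each selected log type and runs pre_use_check(). A sketch (field names from the model and migration above; the host is hypothetical and a reachable Elasticsearch is assumed):

from audits.models import LogStorage

storage = LogStorage(
    name='es-audit', type='es',
    meta={
        'HOSTS': ['http://es.example.com:9200'],  # hypothetical host
        'LOG_TYPES': ['login_log', 'ftp_log'],
    },
)
storage.save()  # validates the ES config before persisting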

View File

@ -3,8 +3,9 @@
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from audits.backends.db import OperateLogStore
from audits.backends import get_log_storage, refresh_log_storage
from common.serializers.fields import LabeledChoiceField, ObjectRelatedField
from common.storage.serializers import BaseStorageSerializer, StorageTypeESSerializer
from common.utils import reverse, i18n_trans
from common.utils.timezone import as_current_tz
from ops.serializers.job import JobExecutionSerializer
@ -13,9 +14,8 @@ from terminal.models import Session
from users.models import User
from . import models
from .const import (
ActionChoices, OperateChoices,
MFAChoices, LoginStatusChoices,
LoginTypeChoices, ActivityChoices,
ActionChoices, OperateChoices, MFAChoices, LoginStatusChoices,
LoginTypeChoices, ActivityChoices, LogStorageType, LogType
)
@ -47,6 +47,13 @@ class FTPLogSerializer(serializers.ModelSerializer):
]
fields = fields_small
def update(self, instance, validated_data):
return get_log_storage(LogType.ftp_log).update(instance, validated_data)
def create(self, validated_data):
storage = get_log_storage(LogType.ftp_log)
return storage.save(**validated_data)
class UserLoginLogSerializer(serializers.ModelSerializer):
mfa = LabeledChoiceField(choices=MFAChoices.choices, label=_("MFA"))
@ -77,7 +84,9 @@ class OperateLogActionDetailSerializer(serializers.ModelSerializer):
fields = ('diff',)
def to_representation(self, instance):
return {'diff': OperateLogStore.convert_diff_friendly(instance)}
return {
'diff': get_log_storage(LogType.operate_log).convert_diff_friendly(instance)
}
class OperateLogSerializer(BulkOrgResourceModelSerializer):
@ -189,3 +198,42 @@ class UserSessionSerializer(serializers.ModelSerializer):
if not request:
return False
return request.session.session_key == obj.key
class LogStorageWithESSerializer(StorageTypeESSerializer):
LOG_TYPES = serializers.MultipleChoiceField(choices=LogType.choices, label=_('Log types'))
INDEX = serializers.CharField(
max_length=1024, default='jumpserver', label=_('Index prefix'), allow_null=True
)
class LogStorageSerializer(BaseStorageSerializer):
type = LabeledChoiceField(choices=LogStorageType.choices, label=_('Type'))
storage_type_serializer_classes_mapping = {
LogStorageType.es.value: LogStorageWithESSerializer
}
log_types_display = serializers.SerializerMethodField(label=_('Log types'))
class Meta(BaseStorageSerializer.Meta):
model = models.LogStorage
fields = ['id', 'name', 'type', 'meta', 'comment', 'log_types_display']
@staticmethod
def get_log_types_display(obj):
return [LogType(i).label for i in obj.meta.get('LOG_TYPES', [])]
@staticmethod
def update_log_types(instance):
current_log_types = instance.meta.get('LOG_TYPES', [])
storages = models.LogStorage.objects.exclude(id=instance.id)
for storage in storages:
log_types = storage.meta.get('LOG_TYPES', [])
storage.meta['LOG_TYPES'] = set(log_types) - set(current_log_types)
models.LogStorage.objects.bulk_update(storages, ['meta'])
return current_log_types
def save(self, **kwargs):
instance = super().save(**kwargs)
self.update_log_types(instance)
refresh_log_storage()
return instance
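update_log_types makes the assignment exclusive: saving one storage strips its log types from every other LogStorage row, so each log type routes to at most one storage. The set arithmetic, isolated as a runnable sketch:

def exclusive_log_types(current: set, others: list) -> list:
    # Mirrors update_log_types above: a log type belongs to one storage only.
    return [other - current for other in others]

assert exclusive_log_types({'ftp_log'}, [{'login_log', 'ftp_log'}]) == [{'login_log'}]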

View File

@ -11,6 +11,8 @@ from rest_framework.request import Request
from acls.models import LoginACL
from acls.notifications import UserLoginReminderMsg
from audits.backends import get_log_storage
from audits.const import LogType
from audits.models import UserLoginLog
from authentication.signals import post_auth_failed, post_auth_success
from authentication.utils import check_different_city_login_if_need
@ -105,7 +107,7 @@ def create_user_session(request, user_id, instance: UserLoginLog):
def send_login_info_to_reviewers(instance: UserLoginLog | str, auth_acl_id):
if isinstance(instance, str):
instance = UserLoginLog.objects.filter(id=instance).first()
instance = get_log_storage(LogType.login_log).get_manager().filter(id=instance).first()
if not instance:
return

View File

@ -176,7 +176,7 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
}
exclude_models = {
'UserPasswordHistory', 'ContentType', 'Asset',
'MessageContent', 'SiteMessage',
'MessageContent', 'SiteMessage', 'UserSession',
'PlatformAutomation', 'PlatformProtocol', 'Protocol',
'HistoricalAccount', 'GatheredUser', 'ApprovalRule',
'BaseAutomation', 'CeleryTask', 'Command', 'JobLog',
@ -189,7 +189,7 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
'ApplyCommandTicket', 'ApplyLoginAssetTicket',
'FavoriteAsset', 'ChangeSecretRecord'
}
include_models = {'UserSession'}
include_models = set()
for i, app in enumerate(apps.get_models(), 1):
app_name = app._meta.app_label
model_name = app._meta.object_name

View File

@ -3,6 +3,8 @@
from django.dispatch import receiver
from django.db import transaction
from audits.backends import get_log_storage
from audits.const import LogType
from audits.models import (
PasswordChangeLog, UserLoginLog, FTPLog, OperateLog
)
@ -34,9 +36,8 @@ def on_user_change_password(sender, user=None, **kwargs):
else:
change_by = str(current_request.user)
with transaction.atomic():
PasswordChangeLog.objects.create(
user=str(user), change_by=change_by,
remote_addr=remote_addr,
get_log_storage(LogType.password_change_log).save(
user=str(user), change_by=change_by, remote_addr=remote_addr
)

View File

@ -11,8 +11,10 @@ from django.db import transaction
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from audits.backends import get_log_storage
from audits.const import LogType
from common.const.crontab import CRONTAB_AT_AM_TWO
from common.storage.ftp_file import FTPFileStorageHandler
from common.storage.backends.ftp_file import FTPFileStorageHandler
from common.utils import get_log_keep_day, get_logger
from ops.celery.decorator import register_as_period_task
from ops.models import CeleryTaskExecution
@ -145,7 +147,7 @@ def clean_audits_log_period():
@shared_task(verbose_name=_('Upload FTP file to external storage'))
def upload_ftp_file_to_external_storage(ftp_log_id, file_name):
logger.info(f'Start upload FTP file record to external storage: {ftp_log_id} - {file_name}')
ftp_log = FTPLog.objects.filter(id=ftp_log_id).first()
ftp_log = get_log_storage(LogType.ftp_log).get_manager().filter(id=ftp_log_id).first()
if not ftp_log:
logger.error(f'FTP db item not found: {ftp_log_id}')
return

View File

@ -16,9 +16,12 @@ router.register(r'password-change-logs', api.PasswordChangeLogViewSet, 'password
router.register(r'job-logs', api.JobAuditViewSet, 'job-log')
router.register(r'my-login-logs', api.MyLoginLogViewSet, 'my-login-log')
router.register(r'user-sessions', api.UserSessionViewSet, 'user-session')
router.register(r'log-storages', api.LogStorageViewSet, 'log-storage')
urlpatterns = [
path('activities/', api.ResourceActivityAPIView.as_view(), name='resource-activities'),
path('log-storages/<uuid:pk>/test-connective/', api.LogStorageTestConnectiveApi.as_view(),
name='log-storage-test-connective'),
]
urlpatterns += router.urls
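The router adds CRUD for log storages plus a per-storage connectivity check. A hypothetical smoke test with the DRF test client; the /api/v1/audits/ prefix and the authentication setup are assumptions, not shown in this diff:

from rest_framework.test import APIClient

client = APIClient()  # authenticate as needed
client.get('/api/v1/audits/log-storages/')  # LogStorageViewSet list
storage_id = '...'  # a LogStorage pk
client.get(f'/api/v1/audits/log-storages/{storage_id}/test-connective/')  # connectivity check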

View File

@ -1,4 +1,5 @@
import copy
from datetime import datetime
from itertools import chain
@ -7,17 +8,18 @@ from django.db import models
from django.db.models import F, Value, CharField
from django.db.models.functions import Concat
from audits.backends import get_log_storage
from audits.const import LogType
from common.db.fields import RelatedManager
from common.utils import validate_ip, get_ip_city, get_logger
from common.utils.timezone import as_current_tz
from .const import DEFAULT_CITY
logger = get_logger(__name__)
def write_login_log(*args, **kwargs):
from audits.models import UserLoginLog
ip = kwargs.get('ip') or ''
if not (ip and validate_ip(ip)):
ip = ip[:15]
@ -25,7 +27,7 @@ def write_login_log(*args, **kwargs):
else:
city = get_ip_city(ip) or DEFAULT_CITY
kwargs.update({'ip': ip, 'city': city})
return UserLoginLog.objects.create(**kwargs)
return get_log_storage(LogType.login_log).save(**kwargs)
def _get_instance_field_value(

View File

@ -6,11 +6,12 @@ from urllib.parse import urljoin, urlparse
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from audits.const import DEFAULT_CITY
from audits.backends import get_log_storage
from audits.const import LogType
from users.models import User
from audits.models import UserLoginLog
from common.utils import get_logger, get_object_or_none
from common.utils import validate_ip, get_ip_city, get_request_ip
from common.utils import get_ip_city, get_request_ip
from .notifications import DifferentCityLoginMessage
logger = get_logger(__file__)
@ -26,7 +27,7 @@ def check_different_city_login_if_need(user, request):
if is_private:
return
usernames = [user.username, f"{user.name}({user.username})"]
last_user_login = UserLoginLog.objects.exclude(
last_user_login = get_log_storage(LogType.login_log).get_manager().exclude(
city__in=city_white
).filter(username__in=usernames, status=True).first()
if not last_user_login:

View File

@ -1,31 +1,31 @@
# -*- coding: utf-8 -*-
#
import datetime
import json
import inspect
import sys
import warnings
if sys.version_info.major == 3 and sys.version_info.minor >= 10:
from collections.abc import Iterable
else:
from collections import Iterable
from functools import reduce, partial
from itertools import groupby
from typing import Iterable, Sequence
from functools import partial
from uuid import UUID
from django.utils.translation import gettext_lazy as _
from django.db.models import QuerySet as DJQuerySet
from django.db.models import QuerySet as DJQuerySet, Q
from elasticsearch7 import Elasticsearch
from elasticsearch7.helpers import bulk
from elasticsearch7.exceptions import RequestError, SSLError
from elasticsearch7.exceptions import RequestError, SSLError, ElasticsearchWarning
from elasticsearch7.exceptions import NotFoundError as NotFoundError7
from elasticsearch8.exceptions import NotFoundError as NotFoundError8
from elasticsearch8.exceptions import BadRequestError
from common.db.encoder import ModelJSONFieldEncoder
from common.utils.common import lazyproperty
from common.utils import get_logger
from common.utils.timezone import local_now_date_display
from common.exceptions import JMSException
from common.exceptions import JMSException, JMSObjectDoesNotExist
warnings.filterwarnings("ignore", category=ElasticsearchWarning)
logger = get_logger(__file__)
@ -60,6 +60,8 @@ class ESClient(object):
class ESClientBase(object):
VERSION = 0
@classmethod
def get_properties(cls, data, index):
return data[index]['mappings']['properties']
@ -70,6 +72,8 @@ class ESClientBase(object):
class ESClientV7(ESClientBase):
VERSION = 7
def __init__(self, *args, **kwargs):
from elasticsearch7 import Elasticsearch
self.es = Elasticsearch(*args, **kwargs)
@ -80,6 +84,7 @@ class ESClientV7(ESClientBase):
class ESClientV6(ESClientV7):
VERSION = 6
@classmethod
def get_properties(cls, data, index):
@ -91,6 +96,8 @@ class ESClientV6(ESClientV7):
class ESClientV8(ESClientBase):
VERSION = 8
def __init__(self, *args, **kwargs):
from elasticsearch8 import Elasticsearch
self.es = Elasticsearch(*args, **kwargs)
@ -115,7 +122,6 @@ def get_es_client_version(**kwargs):
class ES(object):
def __init__(self, config, properties, keyword_fields, exact_fields=None, match_fields=None):
self.version = 7
self.config = config
hosts = self.config.get('HOSTS')
kwargs = self.config.get('OTHER', {})
@ -125,6 +131,7 @@ class ES(object):
kwargs['verify_certs'] = None
self.client = ESClient(hosts=hosts, max_retries=0, **kwargs)
self.es = self.client.es
self.version = self.client.VERSION
self.index_prefix = self.config.get('INDEX') or 'jumpserver'
self.is_index_by_date = bool(self.config.get('INDEX_BY_DATE', False))
@ -203,11 +210,14 @@ class ES(object):
logger.error(e, exc_info=True)
def make_data(self, data):
return []
return {}
def save(self, **kwargs):
other_params = {}
data = self.make_data(kwargs)
return self.es.index(index=self.index, doc_type=self.doc_type, body=data)
if self.version == 7:
other_params = {'doc_type': self.doc_type}
return self.es.index(index=self.index, body=data, refresh=True, **other_params)
def bulk_save(self, command_set, raise_on_error=True):
actions = []
@ -227,21 +237,19 @@ class ES(object):
item = data[0]
return item
def filter(self, query: dict, from_=None, size=None, sort=None):
def filter(self, query: dict, from_=None, size=None, sort=None, fields=None):
try:
data = self._filter(query, from_, size, sort)
from_, size = from_ or 0, size or 1
data = self._filter(query, from_, size, sort, fields)
except Exception as e:
logger.error('ES filter error: {}'.format(e))
data = []
return data
def _filter(self, query: dict, from_=None, size=None, sort=None):
body = self.get_query_body(**query)
def _filter(self, query: dict, from_=None, size=None, sort=None, fields=None):
body = self._get_query_body(query, fields)
search_params = {
'index': self.query_index,
'body': body,
'from_': from_,
'size': size
'index': self.query_index, 'body': body, 'from_': from_, 'size': size
}
if sort is not None:
search_params['sort'] = sort
@ -257,7 +265,7 @@ class ES(object):
def count(self, **query):
try:
body = self.get_query_body(**query)
body = self._get_query_body(query_params=query)
data = self.es.count(index=self.query_index, body=body)
count = data["count"]
except Exception as e:
@ -275,24 +283,36 @@ class ES(object):
def ping(self, timeout=None):
try:
return self.es.ping(request_timeout=timeout)
except Exception:
except Exception: # noqa
return False
@staticmethod
def handler_time_field(data):
datetime__gte = data.get('datetime__gte')
datetime__lte = data.get('datetime__lte')
datetime_range = {}
def _get_date_field_name(self):
date_field_name = ''
for name, attr in self.properties.items():
if attr.get('type', '') == 'date':
date_field_name = name
break
return date_field_name
def handler_time_field(self, data):
date_field_name = getattr(self, 'date_field_name', 'datetime')
datetime__gte = data.get(f'{date_field_name}__gte')
datetime__lte = data.get(f'{date_field_name}__lte')
datetime__range = data.get(f'{date_field_name}__range')
if isinstance(datetime__range, Sequence) and len(datetime__range) == 2:
datetime__gte = datetime__gte or datetime__range[0]
datetime__lte = datetime__lte or datetime__range[1]
datetime_range = {}
if datetime__gte:
if isinstance(datetime__gte, datetime.datetime):
if isinstance(datetime__gte, (datetime.datetime, datetime.date)):
datetime__gte = datetime__gte.strftime('%Y-%m-%d %H:%M:%S')
datetime_range['gte'] = datetime__gte
if datetime__lte:
if isinstance(datetime__lte, datetime.datetime):
if isinstance(datetime__lte, (datetime.datetime, datetime.date)):
datetime__lte = datetime__lte.strftime('%Y-%m-%d %H:%M:%S')
datetime_range['lte'] = datetime__lte
return 'datetime', datetime_range
return date_field_name, datetime_range
@staticmethod
def handle_exact_fields(exact):
@ -306,33 +326,50 @@ class ES(object):
})
return _filter
def get_query_body(self, **kwargs):
@staticmethod
def __handle_field(key, value):
if isinstance(value, UUID):
value = str(value)
if key == 'pk':
key = 'id'
if key.endswith('__in'):
key = key.replace('__in', '')
return key, value
def __build_special_query_body(self, query_kwargs):
match, exact = {}, {}
for k, v in query_kwargs.items():
k, v = self.__handle_field(k, v)
if k in self.exact_fields:
exact[k] = v
elif k in self.match_fields:
match[k] = v
result = self.handle_exact_fields(exact) + [
{'match': {k: v}} for k, v in match.items()
]
return result
def _get_query_body(self, query_params=None, fields=None):
query_params = query_params or {}
not_kwargs = query_params.pop('__not', {})
or_kwargs = query_params.pop('__or', {})
new_kwargs = {}
for k, v in kwargs.items():
if isinstance(v, UUID):
v = str(v)
if k == 'pk':
k = 'id'
if k.endswith('__in'):
k = k.replace('__in', '')
for k, v in query_params.items():
k, v = self.__handle_field(k, v)
new_kwargs[k] = v
kwargs = new_kwargs
index_in_field = 'id__in'
exact_fields = self.exact_fields
match_fields = self.match_fields
match = {}
exact = {}
index = {}
match, exact, index = {}, {}, {}
if index_in_field in kwargs:
index['values'] = kwargs[index_in_field]
for k, v in kwargs.items():
if k in exact_fields:
if k in self.exact_fields:
exact[k] = v
elif k in match_fields:
elif k in self.match_fields:
match[k] = v
# Handle the time range
@ -363,25 +400,29 @@ class ES(object):
body = {
'query': {
'bool': {
'must_not': self.__build_special_query_body(not_kwargs),
'must': [
{'match': {k: v}} for k, v in match.items()
],
'should': should,
'filter': self.handle_exact_fields(exact) +
[
{
'range': {
time_field_name: time_range
}
}
{'match': {k: v}} for k, v in match.items()
] + self.handle_exact_fields(exact),
'should': should + self.__build_special_query_body(or_kwargs),
'filter': [
{'range': {time_field_name: time_range}}
] + [
{
'ids': {k: v}
} for k, v in index.items()
{'ids': {k: v}} for k, v in index.items()
]
}
},
}
if len(body['query']['bool']['should']) > 0:
body['query']['bool']['minimum_should_match'] = 1
body = self.part_fields_query(body, fields)
return json.loads(json.dumps(body, cls=ModelJSONFieldEncoder))
@staticmethod
def part_fields_query(body, fields):
if not fields:
return body
body['_source'] = list(fields)
return body
@ -399,18 +440,25 @@ class QuerySet(DJQuerySet):
@lazyproperty
def _grouped_method_calls(self):
_method_calls = {k: list(v) for k, v in groupby(self._method_calls, lambda x: x[0])}
_method_calls = {}
for sub_action, sub_args, sub_kwargs in self._method_calls:
args, kwargs = _method_calls.get(sub_action, ([], {}))
args.extend(sub_args)
kwargs.update(sub_kwargs)
_method_calls[sub_action] = (args, kwargs)
return _method_calls
@lazyproperty
def _filter_kwargs(self):
_method_calls = self._grouped_method_calls
filter_calls = _method_calls.get('filter')
if not filter_calls:
return {}
names, multi_args, multi_kwargs = zip(*filter_calls)
kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})
def _merge_kwargs(self, filter_args, exclude_args, filter_kwargs, exclude_kwargs):
or_filter = {}
for f_arg in filter_args:
if f_arg.connector == Q.OR:
or_filter.update({c[0]: c[1] for c in f_arg.children})
filter_kwargs['__or'] = self.striped_kwargs(or_filter)
filter_kwargs['__not'] = self.striped_kwargs(exclude_kwargs)
return self.striped_kwargs(filter_kwargs)
@staticmethod
def striped_kwargs(kwargs):
striped_kwargs = {}
for k, v in kwargs.items():
k = k.replace('__exact', '')
@ -420,28 +468,32 @@ class QuerySet(DJQuerySet):
return striped_kwargs
@lazyproperty
def _sort(self):
order_by = self._grouped_method_calls.get('order_by')
if order_by:
for call in reversed(order_by):
fields = call[1]
if fields:
field = fields[-1]
def _filter_kwargs(self):
f_args, f_kwargs = self._grouped_method_calls.get('filter', ((), {}))
e_args, e_kwargs = self._grouped_method_calls.get('exclude', ((), {}))
if not f_kwargs and not e_kwargs:
return {}
return self._merge_kwargs(f_args, e_args, f_kwargs, e_kwargs)
if field.startswith('-'):
direction = 'desc'
else:
direction = 'asc'
field = field.lstrip('-+')
sort = self._storage.client.get_sort(field, direction)
return sort
@lazyproperty
def _sort(self):
order_by = self._grouped_method_calls.get('order_by', ((), {}))[0]
for field in order_by:
if field.startswith('-'):
direction = 'desc'
else:
direction = 'asc'
field = field.lstrip('-+')
sort = self._storage.client.get_sort(field, direction)
return sort
def __execute(self):
_filter_kwargs = self._filter_kwargs
_sort = self._sort
from_, size = self._slice or (None, None)
data = self._storage.filter(_filter_kwargs, from_=from_, size=size, sort=_sort)
return self.model.from_multi_dict(data)
_vl_args, _vl_kwargs = self._grouped_method_calls.get('values_list', ((), {}))
from_, size = self._slice or (0, 20)
data = self._storage.filter(
self._filter_kwargs, from_=from_, size=size, sort=self._sort, fields=_vl_args
)
return self.model.from_multi_dict(data, flat=bool(_vl_args))
def __stage_method_call(self, item, *args, **kwargs):
_clone = self.__clone()
@ -455,13 +507,22 @@ class QuerySet(DJQuerySet):
uqs.model = self.model
return uqs
def first(self):
self._slice = (0, 1)
data = self.__execute()
return self.model.from_dict(data[0]) if data else None
def get(self, **kwargs):
kwargs.update(self._filter_kwargs)
return self._storage.get(kwargs)
item = self._storage.get(kwargs)
if not item:
raise JMSObjectDoesNotExist(
object_name=self.model._meta.verbose_name # noqa
)
return self.model.from_dict(item)
def count(self, limit_to_max_result_window=True):
filter_kwargs = self._filter_kwargs
count = self._storage.count(**filter_kwargs)
count = self._storage.count(**self._filter_kwargs)
if limit_to_max_result_window:
count = min(count, self.max_result_window)
return count
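The reworked QuerySet stays lazy: filter, exclude, order_by, and values_list calls are staged, then merged into a single ES query body, with exclude feeding must_not (via the internal '__not' key), OR'd Q objects feeding should, and values_list fields trimming _source. A sketch of the chaining, assuming an ES-backed store is active:

from audits.backends import get_log_storage
from audits.const import LogType

store = get_log_storage(LogType.login_log)  # ES store -> get_manager() returns this QuerySet
page = (store.get_manager()
        .filter(username='alice')   # -> bool.must / bool.filter
        .exclude(city='Unknown')    # -> bool.must_not
        .order_by('-datetime'))     # -> sort: datetime desc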

View File

@ -0,0 +1,76 @@
from copy import deepcopy
from typing import Callable
from django.db import models
from django.utils.translation import gettext_lazy as _
from rest_framework.mixins import DestroyModelMixin, RetrieveModelMixin
from rest_framework.response import Response
from common.db.fields import EncryptJsonDictTextField
from common.exceptions import JMSException
class CommonStorageModelMixin(models.Model):
id: str
type: str
meta: dict
objects: models.Manager
name = models.CharField(max_length=128, verbose_name=_("Name"), unique=True)
meta = EncryptJsonDictTextField(default={})
is_default = models.BooleanField(default=False, verbose_name=_("Default"))
class Meta:
abstract = True
def __str__(self):
return self.name
@property
def config(self):
config = deepcopy(self.meta)
config['TYPE'] = self.type
return config
def set_to_default(self):
self.is_default = True
self.save(update_fields=["is_default"])
self.__class__.objects.select_for_update().filter(is_default=True).exclude(
id=self.id
).update(is_default=False)
@classmethod
def default(cls):
objs = cls.objects.filter(is_default=True)
if not objs:
objs = cls.objects.filter(name="default", type="server")
if not objs:
objs = cls.objects.all()
return objs.first()
class StorageDestroyModelMixin(DestroyModelMixin):
def perform_destroy(self, instance):
if instance.type_null_or_server or instance.is_default:
raise JMSException(detail=_('Deleting the default storage is not allowed'))
if instance.is_use():
raise JMSException(detail=_('Cannot delete storage that is being used'))
return super().perform_destroy(instance)
class StorageTestConnectiveMixin(RetrieveModelMixin):
get_object: Callable
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
try:
is_valid = instance.is_valid()
except Exception as e:
is_valid = False
msg = _("Test failure: {}".format(str(e)))
else:
if is_valid:
msg = _("Test successful")
else:
msg = _("Test failure: Please check configuration")
return Response({'is_valid': is_valid, 'msg': msg})

View File

@ -0,0 +1,106 @@
from urllib.parse import urlparse
from django.utils.translation import gettext_lazy as _
from django.db.models import Manager
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from common.serializers import MethodSerializer
from common.serializers.fields import ReadableHiddenField
storage_default_help_text = _(
'Set as the default storage; new components will use this storage by default, '
'without affecting existing components'
)
class BaseStorageSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=True, max_length=128, label=_("Name"))
storage_type_serializer_classes_mapping = {}
meta = MethodSerializer()
class Meta:
model: Manager | None = None
fields = ['id', 'name', 'type', 'meta', 'is_default', 'comment']
extra_kwargs = {
'is_default': {'help_text': storage_default_help_text},
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
model = self.__class__.Meta.model
self.fields['name'].validators.append(UniqueValidator(queryset=model.objects.all()))
def validate_meta(self, meta):
_meta = self.instance.meta if self.instance else {}
_meta.update(meta)
return _meta
def get_meta_serializer(self):
default_serializer = serializers.Serializer(read_only=True)
if isinstance(self.instance, self.__class__.Meta.model):
_type = self.instance.type
else:
_type = self.context['request'].query_params.get('type')
type_serializer = self.storage_type_serializer_classes_mapping.get(_type)
serializer_class = type_serializer or default_serializer
if isinstance(serializer_class, type):
serializer = serializer_class()
else:
serializer = serializer_class
return serializer
def to_representation(self, instance):
data = super().to_representation(instance)
need_translate_comments = {
'Store locally': _('Store locally'),
'Do not save': _('Do not save')
}
comment = instance.comment
data['comment'] = need_translate_comments.get(comment, comment)
return data
def es_host_format_validator(host):
if '#' in host:
raise serializers.ValidationError(_('The address cannot contain the special character `#`'))
h = urlparse(host)
default_error_msg = _('The address format is incorrect')
if h.scheme not in ['http', 'https']:
raise serializers.ValidationError(default_error_msg)
if ':' not in h.netloc:
raise serializers.ValidationError(default_error_msg)
_host, _port = h.netloc.rsplit(':', maxsplit=1)
if not _host:
error_msg = _('Host invalid')
raise serializers.ValidationError(error_msg)
if not _port.isdigit():
error_msg = _('Port invalid')
raise serializers.ValidationError(error_msg)
return host
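es_host_format_validator requires an http(s) scheme, a non-empty host, and a numeric port, and rejects '#' outright. Its behavior on a few inputs (the import path assumes this file is common/storage/serializers.py):

from rest_framework import serializers
from common.storage.serializers import es_host_format_validator  # path assumed

es_host_format_validator('http://es.example.com:9200')  # accepted
for bad in ('https://es.example.com',   # no port
            'http://:9200',             # empty host
            'http://es.example.com:x',  # non-numeric port
            'a#b:9200'):                # contains '#'
    try:
        es_host_format_validator(bad)
    except serializers.ValidationError:
        pass  # each of these is rejected
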
class StorageTypeESSerializer(serializers.Serializer):
HOSTS = serializers.ListField(
child=serializers.CharField(validators=[es_host_format_validator]),
label=_('Hosts'), help_text=_(
'If there are multiple hosts, use a comma (,) to separate them. <br>'
'(For example: http://www.jumpserver.a.com:9100, http://www.jumpserver.b.com:9100)'),
allow_null=True
)
INDEX_BY_DATE = serializers.BooleanField(
default=False, label=_('Index by date'),
help_text=_('Whether to create an index by date')
)
INDEX = serializers.CharField(
max_length=1024, default='jumpserver', label=_('Index'), allow_null=True
)
DOC_TYPE = ReadableHiddenField(default='_doc', label=_('Doc type'), allow_null=True)
IGNORE_VERIFY_CERTS = serializers.BooleanField(
default=False, label=_('Ignore Certificate Verification'),
source='OTHER.IGNORE_VERIFY_CERTS', allow_null=True,
)

View File

@ -101,6 +101,10 @@ msgstr ""
msgid "An exception occurred during task execution"
msgstr ""
#: audits/serializers.py:204 audits/serializers.py:215
msgid "Log types"
msgstr "Log types"
#: accounts/automations/backup_account/manager.py:23
msgid "The account backup plan is being executed"
msgstr ""

View File

@ -81,6 +81,10 @@ msgstr "成功"
msgid "Failed"
msgstr "失敗しました"
#: audits/serializers.py:204 audits/serializers.py:215
msgid "Log types"
msgstr "ログの種類"
#: accounts/automations/backup_account/handlers.py:248
msgid "The backup task has no assigned sftp server"
msgstr "このバックアップタスクはsftpサーバーに割り当てられていません"

View File

@ -203,6 +203,10 @@ msgstr "账号推送"
msgid "Change secret"
msgstr "更改密码"
#: audits/serializers.py:204 audits/serializers.py:215
msgid "Log types"
msgstr "日志类型"
#: accounts/const/automation.py:26
msgid "Verify account"
msgstr "验证账号"

View File

@ -119,6 +119,10 @@ msgstr "未找到待處理帳戶"
msgid "Success: %s, Failed: %s, Total: %s"
msgstr "成功: %s, 失敗: %s, 總數: %s"
#: audits/serializers.py:204 audits/serializers.py:215
msgid "Log types"
msgstr "日誌類型"
#: accounts/const/account.py:6
#: accounts/serializers/automations/change_secret.py:34
#: audits/signal_handlers/login_log.py:34 authentication/confirm/password.py:9

View File

@ -158,6 +158,7 @@
"AttrName": "Attribute name",
"AttrValue": "Attribute value",
"Audits": "Audits",
"AuditsLogHelpMsg": "The storage location can be changed in System Settings / Storage Settings / Log Storage",
"Auth": "Authentication",
"AuthConfig": "Authentication",
"AuthLimit": "Login restriction",
@ -699,6 +700,7 @@
"LogoLogoutTip": "Tip: it will be displayed on the web terminal page of enterprise edition users (recommended image size: 82px*82px)",
"Logout": "Sign out",
"LogsAudit": "Activities",
"LogStorage": "LogStorage",
"Lowercase": "Lowercase",
"LunaSetting": "Luna",
"MFAErrorMsg": "MFA errors, please check",

View File

@ -158,6 +158,7 @@
"AttrName": "属性名",
"AttrValue": "プロパティ値",
"Audits": "監査台",
"AuditsLogHelpMsg": "システム設定 / ストレージ設定 / ログストレージ で、保存場所を変更できます",
"Auth": "認証設定",
"AuthConfig": "資格認定構成",
"AuthLimit": "ログイン制限",
@ -724,6 +725,7 @@
"LogoLogoutTip": "ヒントこれは、エンタープライズ版ユーザーのWebターミナルページに表示されます推奨画像サイズ82px*82px",
"Logout": "ログアウト",
"LogsAudit": "ログ監査",
"LogStorage": "ログストレージ",
"Lowercase": "小文字",
"LunaSetting": "Luna 設定",
"MFAErrorMsg": "MFAエラー、確認してください",

View File

@ -158,6 +158,7 @@
"AttrName": "属性名",
"AttrValue": "属性值",
"Audits": "审计台",
"AuditsLogHelpMsg": "可在 系统设置 / 存储设置 / 日志存储 中更改存储位置",
"Auth": "认证设置",
"AuthConfig": "配置认证",
"AuthLimit": "登录限制",
@ -701,6 +702,7 @@
"LogoLogoutTip": "提示:将会显示在企业版用户的 Web 终端页面建议图片大小为82px*82px",
"Logout": "退出登录",
"LogsAudit": "日志审计",
"LogStorage": "日志存储",
"Lowercase": "小写字母",
"LunaSetting": "Luna 配置",
"MFAErrorMsg": "MFA错误请检查",

View File

@ -212,6 +212,7 @@
"AttrValue": "屬性值",
"Auditor": "審計員",
"Audits": "審計台",
"AuditsLogHelpMsg": "可在 系統設定 / 儲存設定 / 日誌儲存 中更改儲存位置",
"Auth": "認證設置",
"AuthConfig": "配寘認證資訊",
"AuthLimit": "登入限制",
@ -932,6 +933,7 @@
"LogoLogoutTip": "提示:將會顯示在企業版使用者的 Web 終端頁面建議圖片大小為82px*82px",
"Logout": "退出登入",
"LogsAudit": "日誌審計",
"LogStorage": "日誌儲存",
"Lowercase": "小寫字母",
"LunaSetting": "Luna 設定",
"LunaSettingUpdate": "Luna 配置設置",

View File

@ -2,7 +2,7 @@ import time
from collections import defaultdict
from django.core.cache import cache
from django.db.models import Count, Max, F, CharField
from django.db.models import Count, Max, F, CharField, Manager
from django.db.models.functions import Cast
from django.http.response import JsonResponse, HttpResponse
from django.utils import timezone
@ -14,9 +14,9 @@ from rest_framework.views import APIView
from assets.const import AllTypes
from assets.models import Asset
from audits.api import OperateLogViewSet
from audits.const import LoginStatusChoices
from audits.models import UserLoginLog, PasswordChangeLog, OperateLog, FTPLog, JobLog
from audits.backends import get_log_storage
from audits.const import LoginStatusChoices, LogType
from audits.models import JobLog
from audits.utils import construct_userlogin_usernames
from common.utils import lazyproperty
from common.utils.timezone import local_now, local_zero_hour
@ -99,33 +99,35 @@ class DateTimeMixin:
@lazyproperty
def login_logs_queryset(self):
qs = UserLoginLog.objects.all()
qs = get_log_storage(LogType.login_log).get_manager().all()
qs = self.get_logs_queryset_filter(qs, 'datetime')
queryset = self.get_logs_queryset(qs, 'username')
return queryset
@lazyproperty
def user_login_logs_on_the_system_queryset(self):
qs = UserLoginLog.objects.filter(status=LoginStatusChoices.success)
qs = get_log_storage(LogType.login_log).get_manager().filter(
status=LoginStatusChoices.success
)
qs = self.get_logs_queryset_filter(qs, 'datetime')
queryset = qs.filter(username__in=construct_userlogin_usernames(self.users))
return queryset
@lazyproperty
def password_change_logs_queryset(self):
qs = PasswordChangeLog.objects.all()
qs = get_log_storage(LogType.password_change_log).get_manager().all()
qs = self.get_logs_queryset_filter(qs, 'datetime')
queryset = self.get_logs_queryset(qs, 'user')
return queryset
@lazyproperty
def operate_logs_queryset(self):
qs = OperateLogViewSet().get_queryset()
qs = get_log_storage(LogType.operate_log).get_manager()
return self.get_logs_queryset_filter(qs, 'datetime')
@lazyproperty
def ftp_logs_queryset(self):
qs = FTPLog.objects.all()
qs = get_log_storage(LogType.ftp_log).get_manager().all()
return self.get_logs_queryset_filter(qs, 'date_start')
@lazyproperty
@ -148,13 +150,13 @@ class DatesLoginMetricMixin:
dates_list: list
date_start_end: tuple
command_type_queryset_tuple: tuple
sessions_queryset: Session.objects
ftp_logs_queryset: FTPLog.objects
job_logs_queryset: JobLog.objects
login_logs_queryset: UserLoginLog.objects
user_login_logs_on_the_system_queryset: UserLoginLog.objects
operate_logs_queryset: OperateLog.objects
password_change_logs_queryset: PasswordChangeLog.objects
sessions_queryset: Manager
ftp_logs_queryset: Manager
job_logs_queryset: Manager
login_logs_queryset: Manager
user_login_logs_on_the_system_queryset: Manager
operate_logs_queryset: Manager
password_change_logs_queryset: Manager
@lazyproperty
def get_type_to_assets(self):
@ -203,8 +205,9 @@ class DatesLoginMetricMixin:
return date_metrics_dict.get('user_id', []), date_metrics_dict.get('asset_id', [])
def get_dates_metrics_total_count_login(self):
queryset = get_log_storage(LogType.login_log).get_manager()
date_metrics_dict = self.get_date_metrics(
UserLoginLog.objects, 'datetime', 'id'
queryset, 'datetime', 'id'
)
return date_metrics_dict.get('id', [])
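
Taken together, the hunks above swap direct ORM access (UserLoginLog.objects, FTPLog.objects, and so on) for get_log_storage(<LogType>).get_manager(), so the dashboard code no longer cares whether a given log type lives in the database or in Elasticsearch. A minimal sketch of that dispatch pattern, assuming a DBStorage wrapper class (the class name and registry below are hypothetical; the real factory in audits/backends may differ):

from audits.const import LogType
from audits.models import UserLoginLog


class DBStorage:
    """Hypothetical database backend: wraps a model's default manager."""

    def __init__(self, model):
        self.model = model

    def get_manager(self):
        # A plain Django Manager, so .all()/.filter()/.first() work unchanged.
        return self.model.objects


def get_log_storage(log_type):
    # Sketch only: the real factory presumably consults the configured
    # LogStorage rows and can hand back an ES-backed manager instead.
    registry = {LogType.login_log: DBStorage(UserLoginLog)}
    return registry[log_type]


# Callers stay backend-agnostic:
qs = get_log_storage(LogType.login_log).get_manager().all()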

View File

@ -593,8 +593,6 @@ class Config(dict):
'SERVER_REPLAY_STORAGE': {},
'SECURITY_DATA_CRYPTO_ALGO': None,
'GMSSL_ENABLED': False,
# 操作日志变更字段的存储ES配置
'OPERATE_LOG_ELASTICSEARCH_CONFIG': {},
# Magnus 组件需要监听的 Oracle 端口范围
'MAGNUS_ORACLE_PORTS': '30000-30030',
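
The dedicated OPERATE_LOG_ELASTICSEARCH_CONFIG key is dropped here (and from settings below) because ES routing is now per-log-type data held in the new audits LogStorage model. A purely illustrative sketch; the field names (type, meta) follow the CommonStorage pattern seen later in this diff and are assumptions, not a verified schema:

from audits.models import LogStorage

# Hypothetical: route operate logs to ES via a LogStorage row instead of
# a static config key; HOSTS/INDEX mirror the shared ES serializer fields.
LogStorage.objects.create(
    name='es-operate-log',
    type='es',
    meta={'HOSTS': ['http://es.example.com:9200'], 'INDEX': 'jumpserver-operate-log'},
)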

View File

@ -207,8 +207,6 @@ HELP_SUPPORT_URL = CONFIG.HELP_SUPPORT_URL
SESSION_RSA_PRIVATE_KEY_NAME = 'jms_private_key'
SESSION_RSA_PUBLIC_KEY_NAME = 'jms_public_key'
OPERATE_LOG_ELASTICSEARCH_CONFIG = CONFIG.OPERATE_LOG_ELASTICSEARCH_CONFIG
MAX_LIMIT_PER_PAGE = CONFIG.MAX_LIMIT_PER_PAGE
DEFAULT_PAGE_SIZE = CONFIG.DEFAULT_PAGE_SIZE
PERM_TREE_REGEN_INTERVAL = CONFIG.PERM_TREE_REGEN_INTERVAL

View File

@ -3,6 +3,7 @@ import logging
import os
import sys
import uuid
from collections import defaultdict
from datetime import timedelta
@ -22,6 +23,7 @@ from acls.models import CommandFilterACL
from assets.models import Asset
from assets.automations.base.manager import SSHTunnelManager
from common.db.encoder import ModelJSONFieldEncoder
from common.utils import lazyproperty
from ops.ansible import JMSInventory, AdHocRunner, PlaybookRunner, UploadFileRunner
"""stop all ssh child processes of the given ansible process pid."""

View File

@ -1,6 +1,5 @@
# coding: utf-8
import datetime
import time
from celery import shared_task
from celery.exceptions import SoftTimeLimitExceeded

View File

@ -4,14 +4,15 @@
from django.utils.translation import gettext_lazy as _
from django_filters import rest_framework as drf_filters
from django_filters import utils
from rest_framework import viewsets, generics, status
from rest_framework import viewsets, generics
from rest_framework.decorators import action
from rest_framework.request import Request
from rest_framework.response import Response
from common.api.mixin import CommonApiMixin
from common.const.http import GET
from common.drf.filters import BaseFilterSet
from common.api.mixin import CommonApiMixin
from common.storage.mixins import StorageDestroyModelMixin, StorageTestConnectiveMixin
from terminal import const
from terminal.filters import CommandStorageFilter, CommandFilter, CommandFilterForStorageTree
from terminal.models import CommandStorage, ReplayStorage
@ -23,20 +24,9 @@ __all__ = [
]
class BaseStorageViewSetMixin(CommonApiMixin):
def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if instance.type_null_or_server or instance.is_default:
data = {'msg': _('Deleting the default storage is not allowed')}
return Response(data=data, status=status.HTTP_400_BAD_REQUEST)
if instance.is_use():
data = {'msg': _('Cannot delete storage that is being used')}
return Response(data=data, status=status.HTTP_400_BAD_REQUEST)
return super().destroy(request, *args, **kwargs)
class CommandStorageViewSet(BaseStorageViewSetMixin, viewsets.ModelViewSet):
class CommandStorageViewSet(
StorageDestroyModelMixin, CommonApiMixin, viewsets.ModelViewSet
):
search_fields = ('name', 'type')
queryset = CommandStorage.objects.all()
serializer_class = CommandStorageSerializer
@ -113,41 +103,23 @@ class ReplayStorageFilterSet(BaseFilterSet):
fields = ['name', 'type', 'is_default', 'type_not']
class ReplayStorageViewSet(BaseStorageViewSetMixin, viewsets.ModelViewSet):
class ReplayStorageViewSet(
StorageDestroyModelMixin, CommonApiMixin, viewsets.ModelViewSet
):
search_fields = ('name', 'type', 'is_default')
queryset = ReplayStorage.objects.all()
serializer_class = ReplayStorageSerializer
filterset_class = ReplayStorageFilterSet
class BaseStorageTestConnectiveMixin:
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
try:
is_valid = instance.is_valid()
except Exception as e:
is_valid = False
msg = _("Test failure: {}".format(str(e)))
else:
if is_valid:
msg = _("Test successful")
else:
msg = _("Test failure: Please check configuration")
data = {
'is_valid': is_valid,
'msg': msg
}
return Response(data)
class CommandStorageTestConnectiveApi(BaseStorageTestConnectiveMixin, generics.RetrieveAPIView):
class CommandStorageTestConnectiveApi(StorageTestConnectiveMixin, generics.RetrieveAPIView):
queryset = CommandStorage.objects.all()
rbac_perms = {
'retrieve': 'terminal.view_commandstorage'
}
class ReplayStorageTestConnectiveApi(BaseStorageTestConnectiveMixin, generics.RetrieveAPIView):
class ReplayStorageTestConnectiveApi(StorageTestConnectiveMixin, generics.RetrieveAPIView):
queryset = ReplayStorage.objects.all()
rbac_perms = {
'retrieve': 'terminal.view_replaystorage'
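
The two inline mixins deleted above are what StorageDestroyModelMixin and StorageTestConnectiveMixin from common/storage/mixins.py presumably now provide for both terminal and audits storages. A reconstruction assembled from the removed code (the shared versions may differ in detail):

from django.utils.translation import gettext_lazy as _
from rest_framework import status
from rest_framework.response import Response


class StorageDestroyModelMixin:
    def destroy(self, request, *args, **kwargs):
        instance = self.get_object()
        if instance.type_null_or_server or instance.is_default:
            data = {'msg': _('Deleting the default storage is not allowed')}
            return Response(data=data, status=status.HTTP_400_BAD_REQUEST)
        if instance.is_use():
            data = {'msg': _('Cannot delete storage that is being used')}
            return Response(data=data, status=status.HTTP_400_BAD_REQUEST)
        return super().destroy(request, *args, **kwargs)


class StorageTestConnectiveMixin:
    def retrieve(self, request, *args, **kwargs):
        instance = self.get_object()
        try:
            is_valid = instance.is_valid()
        except Exception as e:
            is_valid = False
            # Format after translating so the msgid stays stable.
            msg = _('Test failure: {}').format(str(e))
        else:
            if is_valid:
                msg = _('Test successful')
            else:
                msg = _('Test failure: Please check configuration')
        return Response({'is_valid': is_valid, 'msg': msg})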

View File

@ -23,7 +23,7 @@ from common.drf.filters import BaseFilterSet
from common.drf.filters import DatetimeRangeFilterBackend
from common.drf.renders import PassthroughRenderer
from common.permissions import IsServiceAccount
from common.storage.replay import ReplayStorageHandler
from common.storage.backends.replay import ReplayStorageHandler
from common.utils import data_to_json, is_uuid, i18n_fmt
from common.utils import get_logger, get_object_or_none
from common.views.mixins import RecordViewLogMixin

View File

@ -9,9 +9,9 @@ from django.conf import settings
from django.db import models
from django.utils.translation import gettext_lazy as _
from common.db.fields import EncryptJsonDictTextField
from common.db.models import JMSBaseModel
from common.plugins.es import QuerySet as ESQuerySet
from common.storage.mixins import CommonStorageModelMixin
from common.utils import get_logger
from common.utils.timezone import local_now_date_display
from terminal import const
@ -22,34 +22,6 @@ from ..session.command import Command
logger = get_logger(__file__)
class CommonStorageModelMixin(models.Model):
name = models.CharField(max_length=128, verbose_name=_("Name"), unique=True)
meta = EncryptJsonDictTextField(default={})
is_default = models.BooleanField(default=False, verbose_name=_("Default"))
class Meta:
abstract = True
def __str__(self):
return self.name
def set_to_default(self):
self.is_default = True
self.save(update_fields=["is_default"])
self.__class__.objects.select_for_update().filter(is_default=True).exclude(
id=self.id
).update(is_default=False)
@classmethod
def default(cls):
objs = cls.objects.filter(is_default=True)
if not objs:
objs = cls.objects.filter(name="default", type="server")
if not objs:
objs = cls.objects.all()
return objs.first()
class CommandStorage(CommonStorageModelMixin, JMSBaseModel):
type = models.CharField(
max_length=16,
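
CommonStorageModelMixin leaves terminal/models verbatim; per the new import at the top of this hunk it now comes from common/storage/mixins.py, so callers are unchanged. A short usage sketch based on the removed code:

from terminal.models import CommandStorage

# default() keeps its fallback order: the row flagged is_default, then the
# built-in name="default", type="server" row, then any existing row.
storage = CommandStorage.default()

# set_to_default() flags this row and clears is_default on every other row.
storage.set_to_default()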

View File

@ -6,11 +6,10 @@ from django.core.validators import MaxValueValidator, MinValueValidator, validat
from django.db.models import TextChoices
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from common.serializers import MethodSerializer
from common.serializers.fields import LabeledChoiceField
from common.serializers.fields import ReadableHiddenField, EncryptedField
from common.serializers.fields import EncryptedField
from common.storage.serializers import BaseStorageSerializer, StorageTypeESSerializer
from .. import const
from ..models import ReplayStorage, CommandStorage
@ -166,101 +165,7 @@ replay_storage_type_serializer_classes_mapping = {
}
# Command storage serializers
# ---------------------------
def command_storage_es_host_format_validator(host):
if '#' in host:
raise serializers.ValidationError(_('The address cannot contain the special character `#`'))
h = urlparse(host)
default_error_msg = _('The address format is incorrect')
if h.scheme not in ['http', 'https']:
raise serializers.ValidationError(default_error_msg)
if ':' not in h.netloc:
raise serializers.ValidationError(default_error_msg)
_host, _port = h.netloc.rsplit(':', maxsplit=1)
if not _host:
error_msg = _('Host invalid')
raise serializers.ValidationError(error_msg)
if not _port.isdigit():
error_msg = _('Port invalid')
raise serializers.ValidationError(error_msg)
return host
class CommandStorageTypeESSerializer(serializers.Serializer):
HOSTS = serializers.ListField(
child=serializers.CharField(validators=[command_storage_es_host_format_validator]),
label=_('Hosts'), help_text=_(
'If there are multiple hosts, use a comma (,) to separate them. <br>'
'(For example: http://www.jumpserver.a.com:9100, http://www.jumpserver.b.com:9100)'),
allow_null=True
)
INDEX_BY_DATE = serializers.BooleanField(
default=False, label=_('Index by date'),
help_text=_('Whether to create an index by date')
)
INDEX = serializers.CharField(
max_length=1024, default='jumpserver', label=_('Index'), allow_null=True
)
DOC_TYPE = ReadableHiddenField(default='_doc', label=_('Doc type'), allow_null=True)
IGNORE_VERIFY_CERTS = serializers.BooleanField(
default=False, label=_('Ignore Certificate Verification'),
source='OTHER.IGNORE_VERIFY_CERTS', allow_null=True,
)
# mapping
command_storage_type_serializer_classes_mapping = {
const.CommandStorageType.es.value: CommandStorageTypeESSerializer
}
# BaseStorageSerializer
class BaseStorageSerializer(serializers.ModelSerializer):
storage_type_serializer_classes_mapping = {}
meta = MethodSerializer()
class Meta:
model = None
fields = ['id', 'name', 'type', 'meta', 'is_default', 'comment']
def validate_meta(self, meta):
_meta = self.instance.meta if self.instance else {}
_meta.update(meta)
return _meta
def get_meta_serializer(self):
default_serializer = serializers.Serializer(read_only=True)
if isinstance(self.instance, self.__class__.Meta.model):
_type = self.instance.type
else:
_type = self.context['request'].query_params.get('type')
if _type:
serializer_class = self.storage_type_serializer_classes_mapping.get(_type)
else:
serializer_class = default_serializer
if not serializer_class:
serializer_class = default_serializer
if isinstance(serializer_class, type):
serializer = serializer_class()
else:
serializer = serializer_class
return serializer
def to_representation(self, instance):
data = super().to_representation(instance)
need_translate_comments = {
'Store locally': _('Store locally'),
'Do not save': _('Do not save')
}
comment = instance.comment
data['comment'] = need_translate_comments.get(comment, comment)
return data
class SaveHandleMixin(serializers.Serializer):
def save(self, **kwargs):
instance = super().save(**kwargs)
if self.validated_data.get('is_default', False):
@ -268,38 +173,25 @@ class BaseStorageSerializer(serializers.ModelSerializer):
return instance
meta_is_default = {
'help_text': _(
'set as the default storage, will make new Component use the current '
'storage by default, without affecting existing Component'
)
}
# CommandStorageSerializer
class CommandStorageSerializer(BaseStorageSerializer):
# Command storage serializers
# ---------------------------
class CommandStorageSerializer(SaveHandleMixin, BaseStorageSerializer):
type = LabeledChoiceField(choices=const.CommandStorageType.choices, label=_('Type'))
storage_type_serializer_classes_mapping = command_storage_type_serializer_classes_mapping
storage_type_serializer_classes_mapping = {
const.CommandStorageType.es.value: StorageTypeESSerializer
}
class Meta(BaseStorageSerializer.Meta):
model = CommandStorage
extra_kwargs = {
'name': {'validators': [UniqueValidator(queryset=CommandStorage.objects.all())]},
'is_default': meta_is_default
}
# ReplayStorageSerializer
class ReplayStorageSerializer(BaseStorageSerializer):
class ReplayStorageSerializer(SaveHandleMixin, BaseStorageSerializer):
type = LabeledChoiceField(choices=const.ReplayStorageType.choices, label=_('Type'))
storage_type_serializer_classes_mapping = replay_storage_type_serializer_classes_mapping
class Meta(BaseStorageSerializer.Meta):
model = ReplayStorage
extra_kwargs = {
'name': {'validators': [UniqueValidator(queryset=ReplayStorage.objects.all())]},
'is_default': meta_is_default
}
def validate_is_default(self, value):
if self.initial_data.get('type') == const.ReplayStorageType.sftp.value:
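
Both per-app ES serializers collapse into the shared StorageTypeESSerializer from common/storage/serializers.py. The host validation the old command-storage serializer enforced is shown in full above; here is that logic exercised (copied from the removed validator, renamed for brevity, and assumed to run inside the project's configured Django context):

from urllib.parse import urlparse

from rest_framework import serializers


def es_host_format_validator(host):
    # Same checks as the removed command_storage_es_host_format_validator.
    if '#' in host:
        raise serializers.ValidationError('The address cannot contain the special character `#`')
    h = urlparse(host)
    if h.scheme not in ['http', 'https'] or ':' not in h.netloc:
        raise serializers.ValidationError('The address format is incorrect')
    _host, _port = h.netloc.rsplit(':', maxsplit=1)
    if not _host:
        raise serializers.ValidationError('Host invalid')
    if not _port.isdigit():
        raise serializers.ValidationError('Port invalid')
    return host


es_host_format_validator('http://es.example.com:9200')  # ok: returns the host
# es_host_format_validator('es.example.com:9200')       # rejected: scheme is not http/https
# es_host_format_validator('http://example.com')        # rejected: port missing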

View File

@ -10,7 +10,7 @@ from django.core.files.storage import default_storage
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from common.storage.replay import ReplayStorageHandler
from common.storage.backends.replay import ReplayStorageHandler
from ops.celery.decorator import (
register_as_period_task, after_app_ready_start)
from orgs.utils import tmp_to_builtin_org