diff --git a/apps/audits/api.py b/apps/audits/api.py index 2a1409f95..fd977ab23 100644 --- a/apps/audits/api.py +++ b/apps/audits/api.py @@ -3,6 +3,7 @@ from importlib import import_module from django.conf import settings +from django.db.models import F, Value, CharField from rest_framework import generics from rest_framework.permissions import IsAuthenticated from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin @@ -14,11 +15,12 @@ from common.plugins.es import QuerySet as ESQuerySet from orgs.utils import current_org, tmp_to_root_org from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet from .backends import TYPE_ENGINE_MAPPING -from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog +from .const import ActivityChoices +from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog, ActivityLog from .serializers import FTPLogSerializer, UserLoginLogSerializer, JobAuditLogSerializer from .serializers import ( OperateLogSerializer, OperateLogActionDetailSerializer, - PasswordChangeLogSerializer, ActivitiesOperatorLogSerializer, + PasswordChangeLogSerializer, ActivityOperatorLogSerializer, ) @@ -47,8 +49,8 @@ class UserLoginCommonMixin: date_range_filter_fields = [ ('datetime', ('date_from', 'date_to')) ] - filterset_fields = ['username', 'ip', 'city', 'type', 'status', 'mfa'] - search_fields = ['username', 'ip', 'city'] + filterset_fields = ['id', 'username', 'ip', 'city', 'type', 'status', 'mfa'] + search_fields = ['id', 'username', 'ip', 'city'] class UserLoginLogViewSet(UserLoginCommonMixin, ListModelMixin, JMSGenericViewSet): @@ -77,17 +79,42 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView): class ResourceActivityAPIView(generics.ListAPIView): - serializer_class = ActivitiesOperatorLogSerializer + serializer_class = ActivityOperatorLogSerializer rbac_perms = { - 'GET': 'audits.view_operatelog', + 'GET': 'audits.view_activitylog', } - def get_queryset(self): - resource_id = self.request.query_params.get('resource_id') - with tmp_to_root_org(): - queryset = OperateLog.objects.filter(resource_id=resource_id)[:30] + @staticmethod + def get_operate_log_qs(fields, limit=30, **filters): + queryset = OperateLog.objects.filter(**filters).annotate( + r_type=Value(ActivityChoices.operate_log, CharField()), + r_detail_id=F('id'), r_detail=Value(None, CharField()), + r_user=F('user'), r_action=F('action'), + ).values(*fields)[:limit] return queryset + @staticmethod + def get_activity_log_qs(fields, limit=30, **filters): + queryset = ActivityLog.objects.filter(**filters).annotate( + r_type=F('type'), r_detail_id=F('detail_id'), + r_detail=F('detail'), r_user=Value(None, CharField()), + r_action=Value(None, CharField()), + ).values(*fields)[:limit] + return queryset + + def get_queryset(self): + limit = 30 + resource_id = self.request.query_params.get('resource_id') + fields = ( + 'id', 'datetime', 'r_detail', 'r_detail_id', + 'r_user', 'r_action', 'r_type' + ) + with tmp_to_root_org(): + qs1 = self.get_operate_log_qs(fields, resource_id=resource_id) + qs2 = self.get_activity_log_qs(fields, resource_id=resource_id) + queryset = qs2.union(qs1) + return queryset[:limit] + class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet): model = OperateLog diff --git a/apps/audits/const.py b/apps/audits/const.py index 1dc37de47..90df97a6c 100644 --- a/apps/audits/const.py +++ b/apps/audits/const.py @@ -35,6 +35,13 @@ class LoginTypeChoices(TextChoices): unknown = "U", _("Unknown") +class 
ActivityChoices(TextChoices): + operate_log = 'O', _('Operate log') + session_log = 'S', _('Session log') + login_log = 'L', _('Login log') + task = 'T', _('Task') + + class MFAChoices(IntegerChoices): disabled = 0, _("Disabled") enabled = 1, _("Enabled") diff --git a/apps/audits/handler.py b/apps/audits/handler.py index 41fd59c6c..0e7b540a2 100644 --- a/apps/audits/handler.py +++ b/apps/audits/handler.py @@ -130,58 +130,6 @@ class OperatorLogHandler(metaclass=Singleton): after = self.__data_processing(after) return before, after - @staticmethod - def _get_Session_params(resource, **kwargs): - # 更新会话的日志不在Activity中体现, - # 否则会话结束,录像文件结束操作的会话记录都会体现出来 - params = {} - action = kwargs.get('data', {}).get('action', 'create') - detail = _( - '{} used account[{}], login method[{}] login the asset.' - ).format( - resource.user, resource.account, resource.login_from_display - ) - if action == ActionChoices.create: - params = { - 'action': ActionChoices.connect, - 'resource_id': str(resource.asset_id), - 'user': resource.user, 'detail': detail - } - return params - - @staticmethod - def _get_ChangeSecretRecord_params(resource, **kwargs): - detail = _( - 'User {} has executed change auth plan for this account.({})' - ).format( - resource.created_by, _(resource.status.title()) - ) - return { - 'action': ActionChoices.change_auth, 'detail': detail, - 'resource_id': str(resource.account_id), - } - - @staticmethod - def _get_UserLoginLog_params(resource, **kwargs): - username = resource.username - login_status = _('Success') if resource.status else _('Failed') - detail = _('User {} login into this service.[{}]').format( - resource.username, login_status - ) - user_id = User.objects.filter(username=username).\ - values_list('id', flat=True)[0] - return { - 'action': ActionChoices.login, 'detail': detail, - 'resource_id': str(user_id), - } - - def _activity_handle(self, data, object_name, resource): - param_func = getattr(self, '_get_%s_params' % object_name, None) - if param_func is not None: - params = param_func(resource, data=data) - data.update(params) - return data - def create_or_update_operate_log( self, action, resource_type, resource=None, resource_display=None, force=False, log_id=None, before=None, after=None, @@ -207,7 +155,6 @@ class OperatorLogHandler(metaclass=Singleton): 'remote_addr': remote_addr, 'before': before, 'after': after, 'org_id': get_current_org_id(), } - data = self._activity_handle(data, object_name, resource=resource) with transaction.atomic(): if self.log_client.ping(timeout=1): client = self.log_client diff --git a/apps/audits/migrations/0021_auto_20230207_0857.py b/apps/audits/migrations/0021_auto_20230207_0857.py new file mode 100644 index 000000000..e9e8b939e --- /dev/null +++ b/apps/audits/migrations/0021_auto_20230207_0857.py @@ -0,0 +1,30 @@ +# Generated by Django 3.2.16 on 2023-02-07 00:57 + +from django.db import migrations, models +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('audits', '0020_auto_20230117_1004'), + ] + + operations = [ + migrations.CreateModel( + name='ActivityLog', + fields=[ + ('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')), + ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)), + ('type', models.CharField(choices=[('O', 'Operate log'), ('S', 'Session log'), ('L', 'Login log'), ('T', 'Task')], default=None, max_length=2, null=True, verbose_name='Activity type')), + ('resource_id', models.CharField(blank=True, 
db_index=True, default='', max_length=36, verbose_name='Resource')), + ('datetime', models.DateTimeField(auto_now=True, db_index=True, verbose_name='Datetime')), + ('detail', models.TextField(blank=True, default='', verbose_name='Detail')), + ('detail_id', models.CharField(default=None, max_length=36, null=True, verbose_name='Detail ID')), + ], + options={ + 'verbose_name': 'Activity log', + 'ordering': ('-datetime',), + }, + ), + ] diff --git a/apps/audits/models.py b/apps/audits/models.py index 93fb6733b..29a67911f 100644 --- a/apps/audits/models.py +++ b/apps/audits/models.py @@ -12,6 +12,7 @@ from orgs.utils import current_org from .const import ( OperateChoices, ActionChoices, + ActivityChoices, LoginTypeChoices, MFAChoices, LoginStatusChoices, @@ -20,6 +21,7 @@ from .const import ( __all__ = [ "FTPLog", "OperateLog", + "ActivityLog", "PasswordChangeLog", "UserLoginLog", ] @@ -59,7 +61,6 @@ class OperateLog(OrgModelMixin): remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True) datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True) diff = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True) - detail = models.CharField(max_length=128, null=True, blank=True, verbose_name=_('Detail')) def __str__(self): return "<{}> {} <{}>".format(self.user, self.action, self.resource) @@ -93,6 +94,34 @@ class OperateLog(OrgModelMixin): ordering = ('-datetime',) +class ActivityLog(OrgModelMixin): + id = models.UUIDField(default=uuid.uuid4, primary_key=True) + type = models.CharField( + choices=ActivityChoices.choices, max_length=2, + null=True, default=None, verbose_name=_("Activity type"), + ) + resource_id = models.CharField( + max_length=36, blank=True, default='', + db_index=True, verbose_name=_("Resource") + ) + datetime = models.DateTimeField( + auto_now=True, verbose_name=_('Datetime'), db_index=True + ) + detail = models.TextField(default='', blank=True, verbose_name=_('Detail')) + detail_id = models.CharField( + max_length=36, default=None, null=True, verbose_name=_('Detail ID') + ) + + class Meta: + verbose_name = _("Activity log") + ordering = ('-datetime',) + + def save(self, *args, **kwargs): + if current_org.is_root() and not self.org_id: + self.org_id = Organization.ROOT_ID + return super(ActivityLog, self).save(*args, **kwargs) + + class PasswordChangeLog(models.Model): id = models.UUIDField(default=uuid.uuid4, primary_key=True) user = models.CharField(max_length=128, verbose_name=_("User")) diff --git a/apps/audits/serializers.py b/apps/audits/serializers.py index 6700c3b68..bc260dbb7 100644 --- a/apps/audits/serializers.py +++ b/apps/audits/serializers.py @@ -5,6 +5,7 @@ from rest_framework import serializers from audits.backends.db import OperateLogStore from common.serializers.fields import LabeledChoiceField +from common.utils import reverse from common.utils.timezone import as_current_tz from ops.models.job import JobAuditLog from ops.serializers.job import JobExecutionSerializer @@ -13,7 +14,7 @@ from . 
import models from .const import ( ActionChoices, OperateChoices, MFAChoices, LoginStatusChoices, - LoginTypeChoices, + LoginTypeChoices, ActivityChoices, ) @@ -105,19 +106,44 @@ class SessionAuditSerializer(serializers.ModelSerializer): fields = "__all__" -class ActivitiesOperatorLogSerializer(serializers.Serializer): +class ActivityOperatorLogSerializer(serializers.Serializer): timestamp = serializers.SerializerMethodField() + detail_url = serializers.SerializerMethodField() content = serializers.SerializerMethodField() @staticmethod def get_timestamp(obj): - return as_current_tz(obj.datetime).strftime('%Y-%m-%d %H:%M:%S') + return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S') @staticmethod def get_content(obj): - action = obj.action.replace('_', ' ').capitalize() - if not obj.detail: - ctn = _('User {} {} this resource.').format(obj.user, _(action)) + if not obj['r_detail']: + action = obj['r_action'].replace('_', ' ').capitalize() + ctn = _('User {} {} this resource.').format(obj['r_user'], _(action)) else: - ctn = obj.detail + ctn = obj['r_detail'] return ctn + + @staticmethod + def get_detail_url(obj): + detail_url = '' + detail_id, obj_type = obj['r_detail_id'], obj['r_type'] + if not detail_id: + return detail_url + + if obj_type == ActivityChoices.operate_log: + detail_url = reverse( + view_name='audits:operate-log-detail', + kwargs={'pk': obj['id']}, + api_to_ui=True, is_audit=True + ) + elif obj_type == ActivityChoices.task: + detail_url = reverse( + 'ops:celery-task-log', kwargs={'pk': detail_id} + ) + elif obj_type == ActivityChoices.login_log: + detail_url = '%s?id=%s' % ( + reverse('api-audits:login-log-list', api_to_ui=True, is_audit=True), + detail_id + ) + return detail_url diff --git a/apps/audits/signal_handlers.py b/apps/audits/signal_handlers.py deleted file mode 100644 index 9741d93f6..000000000 --- a/apps/audits/signal_handlers.py +++ /dev/null @@ -1,316 +0,0 @@ -# -*- coding: utf-8 -*- -# -import uuid - -from django.apps import apps -from django.conf import settings -from django.contrib.auth import BACKEND_SESSION_KEY -from django.db import transaction -from django.db.models.signals import post_save, pre_save, m2m_changed, pre_delete -from django.dispatch import receiver -from django.utils import timezone, translation -from django.utils.functional import LazyObject -from django.utils.translation import ugettext_lazy as _ -from rest_framework.renderers import JSONRenderer -from rest_framework.request import Request - -from audits.handler import ( - get_instance_current_with_cache_diff, cache_instance_before_data, - create_or_update_operate_log, get_instance_dict_from_cache -) -from audits.utils import model_to_dict_for_operate_log as model_to_dict -from authentication.signals import post_auth_failed, post_auth_success -from authentication.utils import check_different_city_login_if_need -from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL -from common.signals import django_ready -from common.utils import get_request_ip, get_logger, get_syslogger -from common.utils.encode import data_to_json -from jumpserver.utils import current_request -from terminal.models import Session, Command -from terminal.serializers import SessionSerializer, SessionCommandSerializer -from users.models import User -from users.signals import post_user_change_password -from . 
import models, serializers -from .const import MODELS_NEED_RECORD, ActionChoices -from .utils import write_login_log - -logger = get_logger(__name__) -sys_logger = get_syslogger(__name__) -json_render = JSONRenderer() - - -class AuthBackendLabelMapping(LazyObject): - @staticmethod - def get_login_backends(): - backend_label_mapping = {} - for source, backends in User.SOURCE_BACKEND_MAPPING.items(): - for backend in backends: - backend_label_mapping[backend] = source.label - backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _("SSH Key") - backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _("Password") - backend_label_mapping[settings.AUTH_BACKEND_SSO] = _("SSO") - backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _("Auth Token") - backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _("WeCom") - backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _("FeiShu") - backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _("DingTalk") - backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _("Temporary token") - return backend_label_mapping - - def _setup(self): - self._wrapped = self.get_login_backends() - - -AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping() - -M2M_ACTION = { - POST_ADD: ActionChoices.create, - POST_REMOVE: ActionChoices.delete, - POST_CLEAR: ActionChoices.delete, -} - - -@receiver(m2m_changed) -def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs): - if action not in M2M_ACTION: - return - if not instance: - return - - resource_type = instance._meta.verbose_name - current_instance = model_to_dict(instance, include_model_fields=False) - - instance_id = current_instance.get('id') - log_id, before_instance = get_instance_dict_from_cache(instance_id) - - field_name = str(model._meta.verbose_name) - objs = model.objects.filter(pk__in=pk_set) - objs_display = [str(o) for o in objs] - action = M2M_ACTION[action] - changed_field = current_instance.get(field_name, []) - - after, before, before_value = None, None, None - if action == ActionChoices.create: - before_value = list(set(changed_field) - set(objs_display)) - elif action == ActionChoices.delete: - before_value = list( - set(changed_field).symmetric_difference(set(objs_display)) - ) - - if changed_field: - after = {field_name: changed_field} - if before_value: - before = {field_name: before_value} - - if sorted(str(before)) == sorted(str(after)): - return - - create_or_update_operate_log( - ActionChoices.update, resource_type, - resource=instance, log_id=log_id, before=before, after=after - ) - - -def signal_of_operate_log_whether_continue(sender, instance, created, update_fields=None): - condition = True - if not instance: - condition = False - if instance and getattr(instance, SKIP_SIGNAL, False): - condition = False - # 终端模型的 create 事件由系统产生,不记录 - if instance._meta.object_name == 'Terminal' and created: - condition = False - # last_login 改变是最后登录日期, 每次登录都会改变 - if instance._meta.object_name == 'User' and update_fields and 'last_login' in update_fields: - condition = False - # 不在记录白名单中,跳过 - if sender._meta.object_name not in MODELS_NEED_RECORD: - condition = False - return condition - - -@receiver(pre_save) -def on_object_pre_create_or_update(sender, instance=None, raw=False, using=None, update_fields=None, **kwargs): - ok = signal_of_operate_log_whether_continue( - sender, instance, False, update_fields - ) - if not ok: - return - - # users.PrivateToken Model 没有 id 有 pk字段 - instance_id = getattr(instance, 'id', getattr(instance, 'pk', None)) - instance_before_data = {'id': 
instance_id} - raw_instance = type(instance).objects.filter(pk=instance_id).first() - - if raw_instance: - instance_before_data = model_to_dict(raw_instance) - operate_log_id = str(uuid.uuid4()) - instance_before_data['operate_log_id'] = operate_log_id - setattr(instance, 'operate_log_id', operate_log_id) - cache_instance_before_data(instance_before_data) - - -@receiver(post_save) -def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs): - ok = signal_of_operate_log_whether_continue( - sender, instance, created, update_fields - ) - if not ok: - return - - log_id, before, after = None, None, None - if created: - action = models.ActionChoices.create - after = model_to_dict(instance) - log_id = getattr(instance, 'operate_log_id', None) - else: - action = ActionChoices.update - current_instance = model_to_dict(instance) - log_id, before, after = get_instance_current_with_cache_diff(current_instance) - - resource_type = sender._meta.verbose_name - object_name = sender._meta.object_name - create_or_update_operate_log( - action, resource_type, resource=instance, log_id=log_id, - before=before, after=after, object_name=object_name - ) - - -@receiver(pre_delete) -def on_object_delete(sender, instance=None, **kwargs): - ok = signal_of_operate_log_whether_continue(sender, instance, False) - if not ok: - return - - resource_type = sender._meta.verbose_name - create_or_update_operate_log( - ActionChoices.delete, resource_type, - resource=instance, before=model_to_dict(instance) - ) - - -@receiver(post_user_change_password, sender=User) -def on_user_change_password(sender, user=None, **kwargs): - if not current_request: - remote_addr = '127.0.0.1' - change_by = 'System' - else: - remote_addr = get_request_ip(current_request) - if not current_request.user.is_authenticated: - change_by = str(user) - else: - change_by = str(current_request.user) - with transaction.atomic(): - models.PasswordChangeLog.objects.create( - user=str(user), change_by=change_by, - remote_addr=remote_addr, - ) - - -def on_audits_log_create(sender, instance=None, **kwargs): - if sender == models.UserLoginLog: - category = "login_log" - serializer_cls = serializers.UserLoginLogSerializer - elif sender == models.FTPLog: - category = "ftp_log" - serializer_cls = serializers.FTPLogSerializer - elif sender == models.OperateLog: - category = "operation_log" - serializer_cls = serializers.OperateLogSerializer - elif sender == models.PasswordChangeLog: - category = "password_change_log" - serializer_cls = serializers.PasswordChangeLogSerializer - elif sender == Session: - category = "host_session_log" - serializer_cls = SessionSerializer - elif sender == Command: - category = "session_command_log" - serializer_cls = SessionCommandSerializer - else: - return - - serializer = serializer_cls(instance) - data = data_to_json(serializer.data, indent=None) - msg = "{} - {}".format(category, data) - sys_logger.info(msg) - - -def get_login_backend(request): - backend = request.session.get('auth_backend', '') or \ - request.session.get(BACKEND_SESSION_KEY, '') - - backend_label = AUTH_BACKEND_LABEL_MAPPING.get(backend, None) - if backend_label is None: - backend_label = '' - return backend_label - - -def generate_data(username, request, login_type=None): - user_agent = request.META.get('HTTP_USER_AGENT', '') - login_ip = get_request_ip(request) or '0.0.0.0' - - if login_type is None and isinstance(request, Request): - login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', 'U') - if login_type is None: - 
login_type = 'W' - - with translation.override('en'): - backend = str(get_login_backend(request)) - - data = { - 'username': username, - 'ip': login_ip, - 'type': login_type, - 'user_agent': user_agent[0:254], - 'datetime': timezone.now(), - 'backend': backend, - } - return data - - -@receiver(post_auth_success) -def on_user_auth_success(sender, user, request, login_type=None, **kwargs): - logger.debug('User login success: {}'.format(user.username)) - check_different_city_login_if_need(user, request) - data = generate_data(user.username, request, login_type=login_type) - request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S") - data.update({'mfa': int(user.mfa_enabled), 'status': True}) - write_login_log(**data) - - -@receiver(post_auth_failed) -def on_user_auth_failed(sender, username, request, reason='', **kwargs): - logger.debug('User login failed: {}'.format(username)) - data = generate_data(username, request) - data.update({'reason': reason[:128], 'status': False}) - write_login_log(**data) - - -@receiver(django_ready) -def on_django_start_set_operate_log_monitor_models(sender, **kwargs): - exclude_apps = { - 'django_cas_ng', 'captcha', 'admin', 'jms_oidc_rp', - 'django_celery_beat', 'contenttypes', 'sessions', 'auth' - } - exclude_models = { - 'UserPasswordHistory', 'ContentType', - 'MessageContent', 'SiteMessage', - 'PlatformAutomation', 'PlatformProtocol', 'Protocol', - 'HistoricalAccount', 'GatheredUser', 'ApprovalRule', - 'BaseAutomation', 'CeleryTask', 'Command', 'JobAuditLog', - 'ConnectionToken', 'SessionJoinRecord', - 'HistoricalJob', 'Status', 'TicketStep', 'Ticket', - 'UserAssetGrantedTreeNodeRelation', 'TicketAssignee', - 'SuperTicket', 'SuperConnectionToken', 'PermNode', - 'PermedAsset', 'PermedAccount', 'MenuPermission', - 'Permission', 'TicketSession', 'ApplyLoginTicket', - 'ApplyCommandTicket', 'ApplyLoginAssetTicket', - 'FTPLog', 'OperateLog', 'PasswordChangeLog' - } - for i, app in enumerate(apps.get_models(), 1): - app_name = app._meta.app_label - model_name = app._meta.object_name - if app_name in exclude_apps or \ - model_name in exclude_models or \ - model_name.endswith('Execution'): - continue - MODELS_NEED_RECORD.add(model_name) diff --git a/apps/audits/signal_handlers/__init__.py b/apps/audits/signal_handlers/__init__.py new file mode 100644 index 000000000..82e0f4917 --- /dev/null +++ b/apps/audits/signal_handlers/__init__.py @@ -0,0 +1,4 @@ +from .activity_log import * +from .login_log import * +from .operate_log import * +from .other import * diff --git a/apps/audits/signal_handlers/activity_log.py b/apps/audits/signal_handlers/activity_log.py new file mode 100644 index 000000000..ce1b99fe2 --- /dev/null +++ b/apps/audits/signal_handlers/activity_log.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# +from celery import signals +from django.db.models.signals import post_save +from django.utils.translation import ugettext_lazy as _ + +from audits.models import ActivityLog +from assets.models import Asset, Node +from accounts.const import AutomationTypes +from accounts.models import AccountBackupAutomation +from common.utils import get_object_or_none +from ops.celery import app +from orgs.utils import tmp_to_root_org +from terminal.models import Session +from users.models import User +from jumpserver.utils import current_request + +from ..const import ActivityChoices + + +class ActivityLogHandler(object): + + @staticmethod + def _func_accounts_execute_automation(*args, **kwargs): + asset_ids = [] + pid, tp = kwargs.get('pid'), 
kwargs.get('tp') + model = AutomationTypes.get_type_model(tp) + task_type_label = tp.label + with tmp_to_root_org(): + instance = get_object_or_none(model, pk=pid) + if instance is not None: + asset_ids = instance.get_all_assets().values_list('id', flat=True) + return task_type_label, asset_ids + + @staticmethod + def _func_accounts_push_accounts_to_assets(*args, **kwargs): + return '', args[0][1] + + @staticmethod + def _func_accounts_execute_account_backup_plan(*args, **kwargs): + asset_ids, pid = [], kwargs.get('pid') + with tmp_to_root_org(): + instance = get_object_or_none(AccountBackupAutomation, pk=pid) + if instance is not None: + asset_ids = Asset.objects.filter( + platform__type__in=instance.types + ).values_list('id', flat=True) + return '', asset_ids + + @staticmethod + def _func_assets_verify_accounts_connectivity(*args, **kwargs): + return '', args[0][1] + + @staticmethod + def _func_accounts_verify_accounts_connectivity(*args, **kwargs): + return '', args[0][1] + + @staticmethod + def _func_assets_test_assets_connectivity_manual(*args, **kwargs): + return '', args[0][0] + + @staticmethod + def _func_assets_test_node_assets_connectivity_manual(*args, **kwargs): + asset_ids = [] + node = get_object_or_none(Node, pk=args[0][0]) + if node is not None: + asset_ids = node.get_all_assets().values_list('id', flat=True) + return '', asset_ids + + @staticmethod + def _func_assets_update_assets_hardware_info_manual(*args, **kwargs): + return '', args[0][0] + + @staticmethod + def _func_assets_update_node_assets_hardware_info_manual(*args, **kwargs): + asset_ids = [] + node = get_object_or_none(Node, pk=args[0][0]) + if node is not None: + asset_ids = node.get_all_assets().values_list('id', flat=True) + return '', asset_ids + + def get_celery_task_info(self, task_name, *args, **kwargs): + task_display, resource_ids = self.get_info_by_task_name( + task_name, *args, **kwargs + ) + return task_display, resource_ids + + @staticmethod + def get_task_display(task_name, **kwargs): + task = app.tasks.get(task_name) + return getattr(task, 'verbose_name', _('Unknown')) + + def get_info_by_task_name(self, task_name, *args, **kwargs): + resource_ids = [] + task_name_list = str(task_name).split('.') + if len(task_name_list) < 2: + return '', resource_ids + + task_display = self.get_task_display(task_name) + model, name = task_name_list[0], task_name_list[-1] + func_name = '_func_%s_%s' % (model, name) + handle_func = getattr(self, func_name, None) + if handle_func is not None: + task_type, resource_ids = handle_func(*args, **kwargs) + if task_type: + task_display = '%s-%s' % (task_display, task_type) + return task_display, resource_ids + + @staticmethod + def session_for_activity(obj): + detail = _( + '{} used account[{}], login method[{}] login the asset.' 
+ ).format( + obj.user, obj.account, obj.login_from_display + ) + return obj.asset_id, detail, ActivityChoices.session_log + + @staticmethod + def login_log_for_activity(obj): + login_status = _('Success') if obj.status else _('Failed') + detail = _('User {} login into this service.[{}]').format( + obj.username, login_status + ) + user_id = User.objects.filter(username=obj.username).values('id').first() + return user_id['id'], detail, ActivityChoices.login_log + + +activity_handler = ActivityLogHandler() + + +@signals.before_task_publish.connect +def before_task_publish_for_activity_log(headers=None, **kwargs): + task_id, task_name = headers.get('id'), headers.get('task') + args, kwargs = kwargs['body'][:2] + task_display, resource_ids = activity_handler.get_celery_task_info( + task_name, args, **kwargs + ) + activities = [] + detail = _('User %s performs a task(%s) for this resource.') % ( + getattr(current_request, 'user', None), task_display + ) + for resource_id in resource_ids: + activities.append( + ActivityLog( + resource_id=resource_id, type=ActivityChoices.task, detail=detail + ) + ) + ActivityLog.objects.bulk_create(activities) + + activity_info = { + 'activity_ids': [a.id for a in activities] + } + kwargs['activity_info'] = activity_info + + +@signals.task_prerun.connect +def on_celery_task_pre_run_for_activity_log(task_id='', **kwargs): + activity_info = kwargs['kwargs'].pop('activity_info', None) + if activity_info is None: + return + + activities = [] + for activity_id in activity_info['activity_ids']: + activities.append( + ActivityLog(id=activity_id, detail_id=task_id) + ) + ActivityLog.objects.bulk_update(activities, ('detail_id', )) + + +@post_save.connect +def on_object_created( + sender, instance=None, created=False, update_fields=None, **kwargs +): + handler_mapping = { + 'Session': activity_handler.session_for_activity, + 'UserLoginLog': activity_handler.login_log_for_activity + } + model_name = sender._meta.object_name + if not created or model_name not in handler_mapping: + return + + resource_id, detail, a_type = handler_mapping[model_name](instance) + + ActivityLog.objects.create( + resource_id=resource_id, type=a_type, + detail=detail, detail_id=instance.id + ) + + + diff --git a/apps/audits/signal_handlers/login_log.py b/apps/audits/signal_handlers/login_log.py new file mode 100644 index 000000000..34e8665ac --- /dev/null +++ b/apps/audits/signal_handlers/login_log.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# +from django.utils.functional import LazyObject +from django.utils.translation import ugettext_lazy as _ +from django.conf import settings +from django.contrib.auth import BACKEND_SESSION_KEY +from django.dispatch import receiver +from django.utils import timezone, translation +from rest_framework.request import Request + +from authentication.signals import post_auth_failed, post_auth_success +from authentication.utils import check_different_city_login_if_need +from common.utils import get_request_ip, get_logger +from users.models import User + +from ..utils import write_login_log + + +logger = get_logger(__name__) + + +class AuthBackendLabelMapping(LazyObject): + @staticmethod + def get_login_backends(): + backend_label_mapping = {} + for source, backends in User.SOURCE_BACKEND_MAPPING.items(): + for backend in backends: + backend_label_mapping[backend] = source.label + backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _("SSH Key") + backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _("Password") + backend_label_mapping[settings.AUTH_BACKEND_SSO] 
= _("SSO") + backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _("Auth Token") + backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _("WeCom") + backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _("FeiShu") + backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _("DingTalk") + backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _("Temporary token") + return backend_label_mapping + + def _setup(self): + self._wrapped = self.get_login_backends() + + +AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping() + + +def get_login_backend(request): + backend = request.session.get('auth_backend', '') or \ + request.session.get(BACKEND_SESSION_KEY, '') + + backend_label = AUTH_BACKEND_LABEL_MAPPING.get(backend, None) + if backend_label is None: + backend_label = '' + return backend_label + + +def generate_data(username, request, login_type=None): + user_agent = request.META.get('HTTP_USER_AGENT', '') + login_ip = get_request_ip(request) or '0.0.0.0' + + if login_type is None and isinstance(request, Request): + login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', 'U') + if login_type is None: + login_type = 'W' + + with translation.override('en'): + backend = str(get_login_backend(request)) + + data = { + 'username': username, + 'ip': login_ip, + 'type': login_type, + 'user_agent': user_agent[0:254], + 'datetime': timezone.now(), + 'backend': backend, + } + return data + + +@receiver(post_auth_success) +def on_user_auth_success(sender, user, request, login_type=None, **kwargs): + logger.debug('User login success: {}'.format(user.username)) + check_different_city_login_if_need(user, request) + data = generate_data( + user.username, request, login_type=login_type + ) + request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S") + data.update({'mfa': int(user.mfa_enabled), 'status': True}) + write_login_log(**data) + + +@receiver(post_auth_failed) +def on_user_auth_failed(sender, username, request, reason='', **kwargs): + logger.debug('User login failed: {}'.format(username)) + data = generate_data(username, request) + data.update({'reason': reason[:128], 'status': False}) + write_login_log(**data) diff --git a/apps/audits/signal_handlers/operate_log.py b/apps/audits/signal_handlers/operate_log.py new file mode 100644 index 000000000..0c0fb9f48 --- /dev/null +++ b/apps/audits/signal_handlers/operate_log.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# +import uuid + +from django.apps import apps +from django.dispatch import receiver +from django.db.models.signals import post_save, pre_save, m2m_changed, pre_delete + +from audits.handler import ( + get_instance_current_with_cache_diff, cache_instance_before_data, + create_or_update_operate_log, get_instance_dict_from_cache +) +from audits.utils import model_to_dict_for_operate_log as model_to_dict +from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL +from common.signals import django_ready + +from ..const import MODELS_NEED_RECORD, ActionChoices + + +M2M_ACTION = { + POST_ADD: ActionChoices.create, + POST_REMOVE: ActionChoices.delete, + POST_CLEAR: ActionChoices.delete, +} + + +@receiver(m2m_changed) +def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs): + if action not in M2M_ACTION: + return + if not instance: + return + + resource_type = instance._meta.verbose_name + current_instance = model_to_dict(instance, include_model_fields=False) + + instance_id = current_instance.get('id') + log_id, before_instance = get_instance_dict_from_cache(instance_id) + + 
field_name = str(model._meta.verbose_name) + objs = model.objects.filter(pk__in=pk_set) + objs_display = [str(o) for o in objs] + action = M2M_ACTION[action] + changed_field = current_instance.get(field_name, []) + + after, before, before_value = None, None, None + if action == ActionChoices.create: + before_value = list(set(changed_field) - set(objs_display)) + elif action == ActionChoices.delete: + before_value = list( + set(changed_field).symmetric_difference(set(objs_display)) + ) + + if changed_field: + after = {field_name: changed_field} + if before_value: + before = {field_name: before_value} + + if sorted(str(before)) == sorted(str(after)): + return + + create_or_update_operate_log( + ActionChoices.update, resource_type, + resource=instance, log_id=log_id, before=before, after=after + ) + + +def signal_of_operate_log_whether_continue( + sender, instance, created, update_fields=None +): + condition = True + if not instance: + condition = False + if instance and getattr(instance, SKIP_SIGNAL, False): + condition = False + # 终端模型的 create 事件由系统产生,不记录 + if instance._meta.object_name == 'Terminal' and created: + condition = False + # last_login 改变是最后登录日期, 每次登录都会改变 + if instance._meta.object_name == 'User' and \ + update_fields and 'last_login' in update_fields: + condition = False + # 不在记录白名单中,跳过 + if sender._meta.object_name not in MODELS_NEED_RECORD: + condition = False + return condition + + +@receiver(pre_save) +def on_object_pre_create_or_update( + sender, instance=None, raw=False, using=None, update_fields=None, **kwargs +): + ok = signal_of_operate_log_whether_continue( + sender, instance, False, update_fields + ) + if not ok: + return + + # users.PrivateToken Model 没有 id 有 pk字段 + instance_id = getattr(instance, 'id', getattr(instance, 'pk', None)) + instance_before_data = {'id': instance_id} + raw_instance = type(instance).objects.filter(pk=instance_id).first() + + if raw_instance: + instance_before_data = model_to_dict(raw_instance) + operate_log_id = str(uuid.uuid4()) + instance_before_data['operate_log_id'] = operate_log_id + setattr(instance, 'operate_log_id', operate_log_id) + cache_instance_before_data(instance_before_data) + + +@receiver(post_save) +def on_object_created_or_update( + sender, instance=None, created=False, update_fields=None, **kwargs +): + ok = signal_of_operate_log_whether_continue( + sender, instance, created, update_fields + ) + if not ok: + return + + log_id, before, after = None, None, None + if created: + action = ActionChoices.create + after = model_to_dict(instance) + log_id = getattr(instance, 'operate_log_id', None) + else: + action = ActionChoices.update + current_instance = model_to_dict(instance) + log_id, before, after = get_instance_current_with_cache_diff(current_instance) + + resource_type = sender._meta.verbose_name + object_name = sender._meta.object_name + create_or_update_operate_log( + action, resource_type, resource=instance, log_id=log_id, + before=before, after=after, object_name=object_name + ) + + +@receiver(pre_delete) +def on_object_delete(sender, instance=None, **kwargs): + ok = signal_of_operate_log_whether_continue(sender, instance, False) + if not ok: + return + + resource_type = sender._meta.verbose_name + create_or_update_operate_log( + ActionChoices.delete, resource_type, + resource=instance, before=model_to_dict(instance) + ) + + +@receiver(django_ready) +def on_django_start_set_operate_log_monitor_models(sender, **kwargs): + exclude_apps = { + 'django_cas_ng', 'captcha', 'admin', 'jms_oidc_rp', 'audits', + 
'django_celery_beat', 'contenttypes', 'sessions', 'auth', + } + exclude_models = { + 'UserPasswordHistory', 'ContentType', + 'MessageContent', 'SiteMessage', + 'PlatformAutomation', 'PlatformProtocol', 'Protocol', + 'HistoricalAccount', 'GatheredUser', 'ApprovalRule', + 'BaseAutomation', 'CeleryTask', 'Command', 'JobAuditLog', + 'ConnectionToken', 'SessionJoinRecord', + 'HistoricalJob', 'Status', 'TicketStep', 'Ticket', + 'UserAssetGrantedTreeNodeRelation', 'TicketAssignee', + 'SuperTicket', 'SuperConnectionToken', 'PermNode', + 'PermedAsset', 'PermedAccount', 'MenuPermission', + 'Permission', 'TicketSession', 'ApplyLoginTicket', + 'ApplyCommandTicket', 'ApplyLoginAssetTicket', + } + for i, app in enumerate(apps.get_models(), 1): + app_name = app._meta.app_label + model_name = app._meta.object_name + if app_name in exclude_apps or \ + model_name in exclude_models or \ + model_name.endswith('Execution'): + continue + MODELS_NEED_RECORD.add(model_name) diff --git a/apps/audits/signal_handlers/other.py b/apps/audits/signal_handlers/other.py new file mode 100644 index 000000000..07d3694fe --- /dev/null +++ b/apps/audits/signal_handlers/other.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# +from django.dispatch import receiver +from django.db import transaction + +from audits.models import ( + PasswordChangeLog, UserLoginLog, FTPLog, OperateLog +) +from audits.serializers import ( + UserLoginLogSerializer, FTPLogSerializer, OperateLogSerializer, + PasswordChangeLogSerializer +) +from common.utils import get_request_ip, get_syslogger +from common.utils.encode import data_to_json +from jumpserver.utils import current_request +from users.models import User +from users.signals import post_user_change_password +from terminal.models import Session, Command +from terminal.serializers import SessionSerializer, SessionCommandSerializer + + +sys_logger = get_syslogger(__name__) + + +@receiver(post_user_change_password, sender=User) +def on_user_change_password(sender, user=None, **kwargs): + if not current_request: + remote_addr = '127.0.0.1' + change_by = 'System' + else: + remote_addr = get_request_ip(current_request) + if not current_request.user.is_authenticated: + change_by = str(user) + else: + change_by = str(current_request.user) + with transaction.atomic(): + PasswordChangeLog.objects.create( + user=str(user), change_by=change_by, + remote_addr=remote_addr, + ) + + +def on_audits_log_create(sender, instance=None, **kwargs): + if sender == UserLoginLog: + category = "login_log" + serializer_cls = UserLoginLogSerializer + elif sender == FTPLog: + category = "ftp_log" + serializer_cls = FTPLogSerializer + elif sender == OperateLog: + category = "operation_log" + serializer_cls = OperateLogSerializer + elif sender == PasswordChangeLog: + category = "password_change_log" + serializer_cls = PasswordChangeLogSerializer + elif sender == Session: + category = "host_session_log" + serializer_cls = SessionSerializer + elif sender == Command: + category = "session_command_log" + serializer_cls = SessionCommandSerializer + else: + return + + serializer = serializer_cls(instance) + data = data_to_json(serializer.data, indent=None) + msg = "{} - {}".format(category, data) + sys_logger.info(msg) diff --git a/apps/audits/tasks.py b/apps/audits/tasks.py index 1dc507c25..0f67ad2dc 100644 --- a/apps/audits/tasks.py +++ b/apps/audits/tasks.py @@ -7,7 +7,7 @@ from celery import shared_task from ops.celery.decorator import ( register_as_period_task ) -from .models import UserLoginLog, OperateLog, FTPLog +from .models 
import UserLoginLog, OperateLog, FTPLog, ActivityLog from common.utils import get_log_keep_day from django.utils.translation import gettext_lazy as _ @@ -26,6 +26,13 @@ def clean_operation_log_period(): OperateLog.objects.filter(datetime__lt=expired_day).delete() +def clean_activity_log_period(): + now = timezone.now() + days = get_log_keep_day('ACTIVITY_LOG_KEEP_DAYS') + expired_day = now - datetime.timedelta(days=days) + ActivityLog.objects.filter(datetime__lt=expired_day).delete() + + def clean_ftp_log_period(): now = timezone.now() days = get_log_keep_day('FTP_LOG_KEEP_DAYS') diff --git a/apps/jumpserver/conf.py b/apps/jumpserver/conf.py index 55b94c87f..a31a85ec0 100644 --- a/apps/jumpserver/conf.py +++ b/apps/jumpserver/conf.py @@ -512,6 +512,7 @@ class Config(dict): 'LOGIN_LOG_KEEP_DAYS': 200, 'TASK_LOG_KEEP_DAYS': 90, 'OPERATE_LOG_KEEP_DAYS': 200, + 'ACTIVITY_LOG_KEEP_DAYS': 200, 'FTP_LOG_KEEP_DAYS': 200, 'CLOUD_SYNC_TASK_EXECUTION_KEEP_DAYS': 30, diff --git a/apps/jumpserver/settings/custom.py b/apps/jumpserver/settings/custom.py index 7cb92915d..fa6bc398f 100644 --- a/apps/jumpserver/settings/custom.py +++ b/apps/jumpserver/settings/custom.py @@ -117,6 +117,7 @@ WS_LISTEN_PORT = CONFIG.WS_LISTEN_PORT LOGIN_LOG_KEEP_DAYS = CONFIG.LOGIN_LOG_KEEP_DAYS TASK_LOG_KEEP_DAYS = CONFIG.TASK_LOG_KEEP_DAYS OPERATE_LOG_KEEP_DAYS = CONFIG.OPERATE_LOG_KEEP_DAYS +ACTIVITY_LOG_KEEP_DAYS = CONFIG.ACTIVITY_LOG_KEEP_DAYS FTP_LOG_KEEP_DAYS = CONFIG.FTP_LOG_KEEP_DAYS ORG_CHANGE_TO_URL = CONFIG.ORG_CHANGE_TO_URL WINDOWS_SKIP_ALL_MANUAL_PASSWORD = CONFIG.WINDOWS_SKIP_ALL_MANUAL_PASSWORD diff --git a/apps/rbac/const.py b/apps/rbac/const.py index 50d3a4c11..20e8d1dce 100644 --- a/apps/rbac/const.py +++ b/apps/rbac/const.py @@ -78,6 +78,7 @@ exclude_permissions = ( ('orgs', 'organizationmember', '*', '*'), ('settings', 'setting', 'add,change,delete', 'setting'), ('audits', 'operatelog', 'add,delete,change', 'operatelog'), + ('audits', 'activitylog', 'add,delete,change', 'activitylog'), ('audits', 'passwordchangelog', 'add,change,delete', 'passwordchangelog'), ('audits', 'userloginlog', 'add,change,delete,change', 'userloginlog'), ('audits', 'ftplog', 'change,delete', 'ftplog'), diff --git a/apps/settings/serializers/cleaning.py b/apps/settings/serializers/cleaning.py index a180a9ac5..cc6fb00bf 100644 --- a/apps/settings/serializers/cleaning.py +++ b/apps/settings/serializers/cleaning.py @@ -31,4 +31,7 @@ class CleaningSerializer(serializers.Serializer): min_value=1, max_value=99999, required=True, label=_('Session keep duration'), help_text=_('Unit: days, Session, record, command will be delete if more than duration, only in database') ) - + ACTIVITY_LOG_KEEP_DAYS = serializers.IntegerField( + min_value=1, max_value=9999, + label=_("Activity log keep days"), help_text=_("Unit: day") + )
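
Notes (illustrative sketches, not part of the patch):

The reworked ResourceActivityAPIView no longer hands model instances to the serializer: OperateLog and ActivityLog rows are annotated onto a shared set of r_* columns and merged with .union(), so every object reaching ActivityOperatorLogSerializer is a plain dict produced by .values(); that is also why the serializer methods switched from obj.datetime to obj['datetime']. A minimal sketch of the same query pattern, assuming a configured Django project with the models from the hunks above:

    from django.db.models import CharField, F, Value

    from audits.const import ActivityChoices
    from audits.models import ActivityLog, OperateLog

    FIELDS = ('id', 'datetime', 'r_detail', 'r_detail_id', 'r_user', 'r_action', 'r_type')

    def resource_activities(resource_id, limit=30):
        # Normalize both tables onto the same column names so union() is legal.
        operate = OperateLog.objects.filter(resource_id=resource_id).annotate(
            r_type=Value(ActivityChoices.operate_log, CharField()),
            r_detail_id=F('id'), r_detail=Value(None, CharField()),
            r_user=F('user'), r_action=F('action'),
        ).values(*FIELDS)[:limit]

        activity = ActivityLog.objects.filter(resource_id=resource_id).annotate(
            r_type=F('type'), r_detail_id=F('detail_id'),
            r_detail=F('detail'), r_user=Value(None, CharField()),
            r_action=Value(None, CharField()),
        ).values(*FIELDS)[:limit]

        # Each result is a dict, e.g. {'id': ..., 'datetime': ..., 'r_type': 'T', ...}
        return activity.union(operate)[:limit]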
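
The celery handlers in signal_handlers/activity_log.py work in two phases: before_task_publish creates one ActivityLog row per affected resource (detail_id still empty) and stashes the new ids into the task kwargs as activity_info; task_prerun then pops that key and back-fills detail_id with the real task id, which is what get_detail_url later uses to link an activity to its task log. A broker-free sketch of that handshake with plain dicts (the names here are illustrative stand-ins; only the data flow mirrors the handlers above):

    import uuid

    # Stand-in for the ActivityLog table: activity id -> detail_id
    fake_activity_table = {}

    def before_task_publish(task_kwargs, resource_ids):
        # Phase 1: create placeholder activities and stash their ids in the task kwargs.
        activity_ids = []
        for _resource_id in resource_ids:
            activity_id = uuid.uuid4()
            fake_activity_table[activity_id] = None      # detail_id not known yet
            activity_ids.append(activity_id)
        task_kwargs['activity_info'] = {'activity_ids': activity_ids}

    def task_prerun(task_id, task_kwargs):
        # Phase 2: pop the marker and back-fill detail_id with the celery task id.
        activity_info = task_kwargs.pop('activity_info', None)
        if activity_info is None:
            return
        for activity_id in activity_info['activity_ids']:
            fake_activity_table[activity_id] = task_id   # ActivityLog.detail_id = task_id

    kwargs = {'pid': 'some-plan-id'}
    before_task_publish(kwargs, resource_ids=['asset-1', 'asset-2'])
    task_prerun(task_id=str(uuid.uuid4()), task_kwargs=kwargs)
    assert all(detail_id is not None for detail_id in fake_activity_table.values())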
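
ACTIVITY_LOG_KEEP_DAYS follows the same retention path as the other audit logs: a 200-day default in conf.py, exposure through settings and the cleaning serializer, and consumption by clean_activity_log_period() in tasks.py. This diff does not show where that helper is invoked, so the caller below is a hypothetical sketch; only the cutoff calculation mirrors the added code:

    import datetime

    from celery import shared_task
    from django.utils import timezone
    from django.utils.translation import gettext_lazy as _

    from audits.models import ActivityLog
    from common.utils import get_log_keep_day

    def clean_activity_log_period():
        # Same logic as the helper added to apps/audits/tasks.py above.
        days = get_log_keep_day('ACTIVITY_LOG_KEEP_DAYS')   # defaults to 200 per conf.py
        expired_day = timezone.now() - datetime.timedelta(days=days)
        ActivityLog.objects.filter(datetime__lt=expired_day).delete()

    @shared_task(verbose_name=_('Clean audits log'))
    def clean_audits_log_period():
        # Hypothetical caller: how clean_activity_log_period() is scheduled is not part
        # of this diff; presumably it joins the existing periodic cleanup alongside
        # clean_operation_log_period() and clean_ftp_log_period().
        clean_activity_log_period()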