# -*- coding: utf-8 -*-
#
from importlib import import_module

from django.conf import settings
from django.db.models import F, Value, CharField, Q
from django.http import HttpResponse, FileResponse
from django.utils.encoding import escape_uri_path
from rest_framework import generics
from rest_framework import status
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response

from common.api import CommonApiMixin
from common.const.http import GET, POST
from common.drf.filters import DatetimeRangeFilterBackend
from common.permissions import IsServiceAccount
from common.plugins.es import QuerySet as ESQuerySet
from common.storage.ftp_file import FTPFileStorageHandler
from common.utils import is_uuid, get_logger, lazyproperty
from orgs.mixins.api import OrgReadonlyModelViewSet, OrgModelViewSet
from orgs.models import Organization
from orgs.utils import current_org, tmp_to_root_org
from rbac.permissions import RBACPermission
from terminal.models import default_storage
from users.models import User
from .backends import TYPE_ENGINE_MAPPING
from .const import ActivityChoices
from .models import (
    FTPLog, UserLoginLog, OperateLog, PasswordChangeLog,
    ActivityLog, JobLog, UserSession
)
from .serializers import (
    FTPLogSerializer, UserLoginLogSerializer, JobLogSerializer,
    OperateLogSerializer, OperateLogActionDetailSerializer,
    PasswordChangeLogSerializer, ActivityUnionLogSerializer,
    FileSerializer, UserSessionSerializer
)

logger = get_logger(__name__)


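# Read-only audit of job executions (JobLog); supports search/filter by creator
# name and material, plus a date-range filter on date_start.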
class JobAuditViewSet(OrgReadonlyModelViewSet):
    model = JobLog
    extra_filter_backends = [DatetimeRangeFilterBackend]
    date_range_filter_fields = [
        ('date_start', ('date_from', 'date_to'))
    ]
    search_fields = ['creator__name', 'material']
    filterset_fields = ['creator__name', 'material']
    serializer_class = JobLogSerializer
    ordering = ['-date_start']


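# File-transfer (FTP) audit records. Besides the usual listing, the extra
# actions let a service account attach the transferred file (upload) and let
# callers with `audits.view_ftplog` retrieve it again (download).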
class FTPLogViewSet(OrgModelViewSet):
    model = FTPLog
    serializer_class = FTPLogSerializer
    extra_filter_backends = [DatetimeRangeFilterBackend]
    date_range_filter_fields = [
        ('date_start', ('date_from', 'date_to'))
    ]
    filterset_fields = ['user', 'asset', 'account', 'filename']
    search_fields = filterset_fields
    ordering = ['-date_start']
    http_method_names = ['post', 'get', 'head', 'options', 'patch']
    rbac_perms = {
        'download': 'audits.view_ftplog',
    }

    def get_storage(self):
        return FTPFileStorageHandler(self.get_object())

    @action(
        methods=[GET], detail=True, permission_classes=[RBACPermission, ],
        url_path='file/download'
    )
    def download(self, request, *args, **kwargs):
        ftp_log = self.get_object()
        ftp_storage = self.get_storage()
        local_path, url = ftp_storage.get_file_path_url()
        if local_path is None:
            # When the file cannot be fetched, `url` carries the error message.
            return HttpResponse(url)

        # FileResponse closes the file handle once the response has been streamed.
        file = open(default_storage.path(local_path), 'rb')
        response = FileResponse(file)
        response['Content-Type'] = 'application/octet-stream'
        filename = escape_uri_path(ftp_log.filename)
        response["Content-Disposition"] = "attachment; filename*=UTF-8''{}".format(filename)
        return response

    @action(methods=[POST], detail=True, permission_classes=[IsServiceAccount, ], serializer_class=FileSerializer)
    def upload(self, request, *args, **kwargs):
        ftp_log = self.get_object()
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            file = serializer.validated_data['file']
            name, err = ftp_log.save_file_to_storage(file)
            if not name:
                msg = "Failed to save file `{}`: {}".format(ftp_log.id, err)
                logger.error(msg)
                return Response({'msg': str(err)}, status=400)
            url = default_storage.url(name)
            return Response({'url': url}, status=201)
        else:
            msg = 'Upload data invalid: {}'.format(serializer.errors)
            logger.error(msg)
            # Validation failures are client errors, so answer 400 rather than 401.
            return Response({'msg': serializer.errors}, status=400)


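# Shared configuration for the login-log endpoints below.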
class UserLoginCommonMixin:
    model = UserLoginLog
    serializer_class = UserLoginLogSerializer
    extra_filter_backends = [DatetimeRangeFilterBackend]
    date_range_filter_fields = [
        ('datetime', ('date_from', 'date_to'))
    ]
    filterset_fields = ['id', 'username', 'ip', 'city', 'type', 'status', 'mfa']
    search_fields = ['id', 'username', 'ip', 'city']


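# Login logs scoped to the current organization: the root org sees everything,
# other orgs only see logs of their own members.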
class UserLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
    @staticmethod
    def get_org_members():
        users = current_org.get_members().values_list('username', flat=True)
        return users

    def get_queryset(self):
        queryset = super().get_queryset()
        if current_org.is_root():
            return queryset
        # Outside the root org, only show logs of the current org's members.
        users = self.get_org_members()
        queryset = queryset.filter(username__in=users)
        return queryset


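# The requesting user's own login history; requires authentication only.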
class MyLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        qs = super().get_queryset()
        qs = qs.filter(username=self.request.user.username)
        return qs


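# Activity timeline for a single resource: operate logs and activity logs are
# annotated into a common shape and merged with union(), newest first.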
class ResourceActivityAPIView(generics.ListAPIView):
    serializer_class = ActivityUnionLogSerializer
    ordering_fields = ['datetime']
    rbac_perms = {
        'GET': 'audits.view_activitylog',
    }

    @staticmethod
    def get_operate_log_qs(fields, limit, org_q, resource_id=None):
        q, user = Q(resource_id=resource_id), None
        if is_uuid(resource_id):
            user = User.objects.filter(id=resource_id).first()
        if user is not None:
            q |= Q(user=str(user))
        # Annotate operate logs into the shared activity-log shape (r_* fields).
        queryset = OperateLog.objects.filter(q, org_q).annotate(
            r_type=Value(ActivityChoices.operate_log, CharField()),
            r_detail_id=F('id'), r_detail=Value(None, CharField()),
            r_user=F('user'), r_action=F('action'),
        ).values(*fields)[:limit]
        return queryset

    @staticmethod
    def get_activity_log_qs(fields, limit, org_q, **filters):
        queryset = ActivityLog.objects.filter(org_q, **filters).annotate(
            r_type=F('type'), r_detail_id=F('detail_id'),
            r_detail=F('detail'), r_user=Value(None, CharField()),
            r_action=Value(None, CharField()),
        ).values(*fields)[:limit]
        return queryset

    def get_queryset(self):
        limit = 30
        resource_id = self.request.query_params.get('resource_id')
        fields = (
            'id', 'datetime', 'r_detail', 'r_detail_id',
            'r_user', 'r_action', 'r_type'
        )
        org_q = Q(org_id=Organization.SYSTEM_ID) | Q(org_id=current_org.id)
        if resource_id:
            org_q |= Q(org_id='') | Q(org_id=Organization.ROOT_ID)
        with tmp_to_root_org():
            qs1 = self.get_operate_log_qs(fields, limit, org_q, resource_id=resource_id)
            qs2 = self.get_activity_log_qs(fields, limit, org_q, resource_id=resource_id)
            queryset = qs2.union(qs1)
        return queryset.order_by('-datetime')[:limit]


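# Read-only operate logs. When an Elasticsearch backend is configured and
# reachable, queries are served from it instead of the relational database.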
class OperateLogViewSet(OrgReadonlyModelViewSet):
    model = OperateLog
    serializer_class = OperateLogSerializer
    extra_filter_backends = [DatetimeRangeFilterBackend]
    date_range_filter_fields = [
        ('datetime', ('date_from', 'date_to'))
    ]
    filterset_fields = [
        'user', 'action', 'resource_type', 'resource',
        'remote_addr'
    ]
    search_fields = ['resource', 'user']
    ordering = ['-datetime']

    @lazyproperty
    def is_action_detail(self):
        return self.detail and self.request.query_params.get('type') == 'action_detail'

    def get_serializer_class(self):
        if self.is_action_detail:
            return OperateLogActionDetailSerializer
        return super().get_serializer_class()

    def get_queryset(self):
        qs = OperateLog.objects.all()
        if self.is_action_detail:
            with tmp_to_root_org():
                qs |= OperateLog.objects.filter(org_id=Organization.SYSTEM_ID)
        # Prefer the Elasticsearch store when it is configured and responds to ping.
        es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
        if es_config:
            engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
            store = engine_mod.OperateLogStore(es_config)
            if store.ping(timeout=2):
                qs = ESQuerySet(store)
                qs.model = OperateLog
        return qs


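# Password change history, limited to the current org's members unless the
# request is made from the root organization.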
class PasswordChangeLogViewSet(OrgReadonlyModelViewSet):
    model = PasswordChangeLog
    serializer_class = PasswordChangeLogSerializer
    extra_filter_backends = [DatetimeRangeFilterBackend]
    date_range_filter_fields = [
        ('datetime', ('date_from', 'date_to'))
    ]
    filterset_fields = ['user', 'change_by', 'remote_addr']
    search_fields = filterset_fields
    ordering = ['-datetime']

    def get_queryset(self):
        queryset = super().get_queryset()
        if not current_org.is_root():
            users = current_org.get_members()
            queryset = queryset.filter(
                user__in=[str(user) for user in users]
            )
        return queryset


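# Active user sessions backed by the session store; the `offline` action force
# logs out the selected sessions.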
class UserSessionViewSet(CommonApiMixin, viewsets.ModelViewSet):
    http_method_names = ('get', 'post', 'head', 'options', 'trace')
    serializer_class = UserSessionSerializer
    filterset_fields = ['id', 'ip', 'city', 'type']
    search_fields = ['id', 'ip', 'city']
    rbac_perms = {
        'offline': ['audits.offline_usersession']
    }

    @property
    def org_user_ids(self):
        user_ids = current_org.get_members().values_list('id', flat=True)
        return user_ids

    def get_queryset(self):
        keys = UserSession.get_keys()
        queryset = UserSession.objects.filter(key__in=keys)
        if current_org.is_root():
            return queryset
        user_ids = self.org_user_ids
        queryset = queryset.filter(user_id__in=user_ids)
        return queryset

    @action(['POST'], detail=False, url_path='offline')
    def offline(self, request, *args, **kwargs):
        ids = request.data.get('ids', [])
        queryset = self.get_queryset()
        # Never kick the requester's own session offline.
        session_key = request.session.session_key
        queryset = queryset.exclude(key=session_key).filter(id__in=ids)
        if not queryset.exists():
            return Response(status=status.HTTP_200_OK)

        # Remove both the backend session records and the tracking rows.
        keys = queryset.values_list('key', flat=True)
        session_store_cls = import_module(settings.SESSION_ENGINE).SessionStore
        for key in keys:
            session_store_cls(key).delete()
        queryset.delete()
        return Response(status=status.HTTP_200_OK)


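# Illustrative sketch only (not part of this module): these views are normally
# exposed through a DRF router / urlpatterns in the app's urls module. The
# prefixes and names below are assumptions, not the project's actual routes.
#
#     from django.urls import path
#     from rest_framework.routers import DefaultRouter
#
#     router = DefaultRouter()
#     router.register('ftp-logs', FTPLogViewSet, 'ftp-log')
#     router.register('login-logs', UserLoginLogViewSet, 'login-log')
#     router.register('user-sessions', UserSessionViewSet, 'user-session')
#     urlpatterns = [
#         path('activities/', ResourceActivityAPIView.as_view(), name='resource-activities'),
#         *router.urls,
#     ]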