mirror of https://github.com/jumpserver/jumpserver
feat: report charts (#15630)
* perf: initial
* perf: basic finished
* perf: depend
* perf: Update Dockerfile with new base image tag
* perf: Add user report api
* perf: Update Dockerfile with new base image tag
* perf: Use user report api
* perf: Update Dockerfile with new base image tag
* perf: user login report
* perf: Update Dockerfile with new base image tag
* perf: user change password
* perf: change password dashboard
* perf: Update Dockerfile with new base image tag
* perf: Translate
* perf: asset api
* perf: asset activity
* perf: Asset report
* perf: add charts_map
* perf: account report
* perf: Translate
* perf: account automation
* perf: Account automation
* perf: title
* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: feng <1304903146@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: wangruidong <940853815@qq.com>
Co-authored-by: feng626 <57284900+feng626@users.noreply.github.com>
parent 257f290d18
commit b38d83c578
@@ -1,4 +1,4 @@
-FROM jumpserver/core-base:20250509_094529 AS stage-build
+FROM jumpserver/core-base:20250805_083232 AS stage-build

 ARG VERSION

@@ -90,10 +90,10 @@ class ChangeSecretDashboardApi(APIView):

     def get_change_secret_asset_queryset(self):
         qs = self.change_secrets_queryset
-        node_ids = qs.filter(nodes__isnull=False).values_list('nodes', flat=True).distinct()
-        nodes = Node.objects.filter(id__in=node_ids)
+        node_ids = qs.values_list('nodes', flat=True).distinct()
+        nodes = Node.objects.filter(id__in=node_ids).only('id', 'key')
         node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
-        direct_asset_ids = qs.filter(assets__isnull=False).values_list('assets', flat=True).distinct()
+        direct_asset_ids = qs.values_list('assets', flat=True).distinct()
         asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
         return Asset.objects.filter(id__in=asset_ids)

@@ -14,6 +14,10 @@ class Connectivity(TextChoices):
     NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
     CREATE_TEMPORARY_ERR = 'create_temp_err', _('Create temporary error')

+    @classmethod
+    def as_dict(cls):
+        return {choice.value: choice.label for choice in cls}
+

 class AutomationTypes(TextChoices):
     ping = 'ping', _('Ping')
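For reference (illustrative, not part of the patch): the new `as_dict` classmethod simply maps choice values to their labels, which the report APIs later use as a `label_map`. A minimal sketch, assuming only the members visible in this hunk:

# Illustrative only; other Connectivity members exist but are not shown in this hunk.
from assets.const import Connectivity

labels = Connectivity.as_dict()
# e.g. {..., 'ntlm_err': 'NTLM credentials rejected error', 'create_temp_err': 'Create temporary error'}
print(labels.get('create_temp_err'))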
@@ -20,3 +20,7 @@ class Category(ChoicesMixin, models.TextChoices):
         _category = getattr(cls, category.upper(), None)
         choices = [(_category.value, _category.label)] if _category else cls.choices
         return choices
+
+    @classmethod
+    def as_dict(cls):
+        return {choice.value: choice.label for choice in cls}
@@ -53,7 +53,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
         return name

     def get_all_assets(self):
-        nodes = self.nodes.all()
+        nodes = self.nodes.only("id", "key")
         node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list("id", flat=True)
         direct_asset_ids = self.assets.all().values_list("id", flat=True)
         asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
@@ -172,10 +172,7 @@ class UserLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):

     def get_queryset(self):
         queryset = super().get_queryset()
-        if current_org.is_root() or not settings.XPACK_ENABLED:
-            return queryset
-        users = self.get_org_member_usernames()
-        queryset = queryset.filter(username__in=users)
+        queryset = queryset.model.filter_queryset_by_org(queryset)
         return queryset

@@ -297,12 +294,7 @@ class PasswordChangeLogViewSet(OrgReadonlyModelViewSet):

     def get_queryset(self):
         queryset = super().get_queryset()
-        if not current_org.is_root():
-            users = current_org.get_members()
-            queryset = queryset.filter(
-                user__in=[str(user) for user in users]
-            )
-        return queryset
+        return self.model.filter_queryset_by_org(queryset)


 class UserSessionViewSet(CommonApiMixin, viewsets.ModelViewSet):

@@ -189,6 +189,15 @@ class PasswordChangeLog(models.Model):
     class Meta:
         verbose_name = _("Password change log")

+    @staticmethod
+    def filter_queryset_by_org(queryset):
+        if not current_org.is_root():
+            users = current_org.get_members()
+            queryset = queryset.filter(
+                user__in=[str(user) for user in users]
+            )
+        return queryset
+

 class UserLoginLog(models.Model):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@@ -258,6 +267,15 @@ class UserLoginLog(models.Model):
         reason = old_reason_choices.get(self.reason, self.reason)
         return reason

+    @staticmethod
+    def filter_queryset_by_org(queryset):
+        from audits.utils import construct_userlogin_usernames
+        if current_org.is_root() or not settings.XPACK_ENABLED:
+            return queryset
+        user_queryset = current_org.get_members()
+        users = construct_userlogin_usernames(user_queryset)
+        return queryset.filter(username__in=users)
+
     class Meta:
         ordering = ["-datetime", "username"]
         verbose_name = _("User login log")

@@ -1559,5 +1559,49 @@
     "setVariable": "Set variable",
     "userId": "User ID",
     "userName": "User name",
+    "UserActivity": "User activity",
+    "AssetActivity": "Asset activity",
+    "UserReport": "User report",
+    "AssetReport": "Asset report",
+    "AccountReport": "Account report",
+    "NotEnableMfa": "Not enabled mfa",
+    "FaceVector": "Face vector",
+    "NeedUpdatePassword": "Need to update password",
+    "UserOverview": "User overview",
+    "LoginSource": "Login source",
+    "LoginCtyDistribution": "Login country distribution",
+    "VisitTimeDistribution": "Visit time distribution",
+    "LoginMethodStatistics": "Login method statistics",
+    "UserLoginReport": "User login report",
+    "UserChangePasswordReport": "User change password report",
+    "ChangePasswordOverview": "Change password overview",
+    "Operator": "Operator",
+    "TargetUser": "Target user",
+    "OperatorGeographicDistribution": "Operator geographic distribution",
+    "UserModificationTrends": "User modification trends",
+    "ModifyTheTargetUserTopTank": "Modify the target user ranking",
+    "TopRankOfOperateUsers": "Ranking of operating users",
+    "AssetStatisticsReport": "Asset statistics report",
+    "AssetActivityReport": "Asset activity report",
+    "AssetOverview": "Asset overview",
+    "LinkedDomains": "Linked domains",
+    "ConnectedDirectoryServices": "Connected directory services",
+    "AssetTypeDistribution": "Asset type distribution",
+    "WeeklyGrowthTrend": "Weekly growth trend",
+    "DistributionOfAssetLoginMethods": "Distribution of asset login methods",
+    "RemoteLoginProtocolUsageDistribution": "Remote login protocol usage distribution",
+    "OperatingSystemDistributionOfLoginAssets": "Operating system distribution of login assets",
+    "AssetLoginTrends": "Asset login trends",
+    "ConnectionCount": "Connection count",
+    "AccountStatisticsReport": "Account statistics report",
+    "AccountOverview": "Account overview",
+    "AccountCreationSourceDistribution": "Account creation source distribution",
+    "AccountConnectivityStatusDistribution": "Account connectivity status distribution",
+    "AccountPasswordChangeTrends": "Account password change trends",
+    "RankByNumberOfAssetAccounts": "Ranking by number of asset accounts",
+    "AccountAndPasswordChangeRank": "Account and password change ranking",
+    "AccountAutomationReport": "Account automation report",
+    "AccountAutomationOverview": "Account automation overview",
+    "TaskExecutionTrends": "Task execution trends",
     "EmailHelpText": "Please click the 'Submit' button to save the current configuration before clicking 'Test Connection' to ensure the settings take effect."
 }
@@ -1568,6 +1568,48 @@
     "setVariable": "设置参数",
     "userId": "用户ID",
     "userName": "用户名",
+    "UserReport": "用户报表",
+    "AssetReport": "资产报表",
+    "AccountReport": "账号报表",
+    "NotEnableMfa": "未启动 MFA",
+    "FaceVector": "人脸信息",
+    "NeedUpdatePassword": "需改密",
+    "UserOverview": "用户概览",
+    "LoginSource": "登录来源",
+    "LoginCtyDistribution": "登录城市分布",
+    "VisitTimeDistribution": "访问时段分布",
+    "LoginMethodStatistics": "登录方法统计",
+    "UserLoginReport": "用户登录报告",
+    "UserChangePasswordReport": "用户改密报告",
+    "ChangePasswordOverview": "改密概览",
+    "Operator": "操作用户",
+    "TargetUser": "目标用户",
+    "OperatorGeographicDistribution": "操作用户地域分布",
+    "UserModificationTrends": "用户修改趋势",
+    "ModifyTheTargetUserTopTank": "修改目标用户排名",
+    "TopRankOfOperateUsers": "操作用户排名",
+    "AssetStatisticsReport": "资产统计报告",
+    "AssetActivityReport": "资产活动报告",
+    "AssetOverview": "用户概览",
+    "LinkedDomains": "已关联网域",
+    "ConnectedDirectoryServices": "已关联目录服务",
+    "AssetTypeDistribution": "资产类型分布",
+    "WeeklyGrowthTrend": "本周新增趋势",
+    "DistributionOfAssetLoginMethods": "资产登录方式分布",
+    "RemoteLoginProtocolUsageDistribution": "远程登录协议使用分布",
+    "OperatingSystemDistributionOfLoginAssets": "登录资产的操作系统分布",
+    "AssetLoginTrends": "资产登录趋势",
+    "ConnectionCount": "连接数",
+    "AccountStatisticsReport": "账号统计报告",
+    "AccountOverview": "账号概览",
+    "AccountCreationSourceDistribution": "账号创建来源分布",
+    "AccountConnectivityStatusDistribution": "账号可连接性状态分布",
+    "AccountPasswordChangeTrends": "账号改密趋势",
+    "RankByNumberOfAssetAccounts": "资产账号数量排名",
+    "AccountAndPasswordChangeRank": "账号密码变更排名",
+    "AccountAutomationReport": "账号自动化报告",
+    "AccountAutomationOverview": "自动化概览",
+    "TaskExecutionTrends": "任务执行趋势",
     "EmailHelpText": "请点击'提交'按钮保存当前配置后,再点击'测试连接'以确保信息生效",
     "None": "无"
 }
@@ -136,6 +136,7 @@ INSTALLED_APPS = [
     'notifications.apps.NotificationsConfig',
     'rbac.apps.RBACConfig',
     'labels.apps.LabelsConfig',
+    'reports.apps.ReportsConfig',
     'rest_framework',
     'drf_yasg',
     'django_cas_ng',
@@ -30,6 +30,7 @@ resource_api = [
     path('notifications/', include('notifications.urls.api_urls', namespace='api-notifications')),
     path('rbac/', include('rbac.urls.api_urls', namespace='api-rbac')),
     path('labels/', include('labels.urls', namespace='api-label')),
+    path('reports/', include('reports.urls.api_urls', namespace='api-reports')),
 ]

 api_v1 = resource_api + [
@@ -42,6 +43,7 @@ api_v1 = resource_api + [

 app_view_patterns = [
     path('auth/', include('authentication.urls.view_urls'), name='auth'),
     path('ops/', include('ops.urls.view_urls'), name='ops'),
+    path('reports/', include('reports.urls.view_urls'), name='reports'),
     path('tickets/', include('tickets.urls.view_urls'), name='tickets'),
     path('common/', include('common.urls.view_urls'), name='common'),
     re_path(r'flower/(?P<path>.*)', views.celery_flower_view, name='flower-view'),
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
@@ -0,0 +1,4 @@
+from .accouts import *
+from .assets import *
+from .report import *
+from .users import *
@@ -0,0 +1,2 @@
+from .account import *
+from .automation import *
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+#
+from collections import defaultdict
+
+from django.db.models import Count, Q, F, Value
+from django.db.models.functions import Concat
+from django.http import JsonResponse
+from rest_framework.views import APIView
+
+from accounts.models import Account, AccountTemplate
+from assets.const import Connectivity
+from common.permissions import IsValidLicense
+from common.utils import lazyproperty
+from rbac.permissions import RBACPermission
+from reports.api.assets.base import group_stats
+from reports.mixins import DateRangeMixin
+
+__all__ = ['AccountStatisticApi']
+
+
+class AccountStatisticApi(DateRangeMixin, APIView):
+    http_method_names = ['get']
+    rbac_perms = {
+        'GET': 'accounts.view_account',
+    }
+    permission_classes = [RBACPermission, IsValidLicense]
+
+    @lazyproperty
+    def base_qs(self):
+        return Account.objects.all()
+
+    @lazyproperty
+    def template_qs(self):
+        return AccountTemplate.objects.all()
+
+    def get_change_secret_account_metrics(self):
+        filtered_queryset = self.filter_by_date_range(self.base_qs, 'date_change_secret')
+
+        data = defaultdict(set)
+        for t, _id in filtered_queryset.values_list('date_change_secret', 'id'):
+            date_str = str(t.date())
+            data[date_str].add(_id)
+
+        metrics = [len(data.get(str(d), set())) for d in self.date_range_list]
+        return metrics
+
+    def get(self, request, *args, **kwargs):
+        qs = self.base_qs
+
+        stats = qs.aggregate(
+            total=Count(1),
+            active=Count(1, filter=Q(is_active=True)),
+            connected=Count(1, filter=Q(connectivity=Connectivity.OK)),
+            su_from=Count(1, filter=Q(su_from__isnull=False)),
+            date_change_secret=Count(1, filter=Q(secret_reset=True)),
+        )
+
+        stats['template_total'] = self.template_qs.count()
+
+        source_pie_data = [
+            {'name': str(source), 'value': total}
+            for source, total in
+            qs.values('source').annotate(
+                total=Count(1)
+            ).values_list('source', 'total')
+        ]
+
+        by_connectivity = group_stats(
+            qs, 'label', 'connectivity', Connectivity.as_dict(),
+        )
+
+        top_assets = qs.values('asset__name') \
+            .annotate(account_count=Count('id')) \
+            .order_by('-account_count')[:10]
+
+        top_version_accounts = qs.annotate(
+            display_key=Concat(
+                F('asset__name'),
+                Value('('),
+                F('username'),
+                Value(')')
+            )
+        ).values('display_key', 'version').order_by('-version')[:10]
+
+        payload = {
+            'account_stats': stats,
+            'top_assets': list(top_assets),
+            'top_version_accounts': list(top_version_accounts),
+            'source_pie': source_pie_data,
+            'by_connectivity': by_connectivity,
+            'change_secret_account_metrics': {
+                'dates_metrics_date': self.dates_metrics_date,
+                'dates_metrics_total': self.get_change_secret_account_metrics(),
+            }
+        }
+        return JsonResponse(payload, status=200)
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+#
+from collections import defaultdict
+
+from django.http import JsonResponse
+from rest_framework.views import APIView
+
+from accounts.const import AutomationTypes
+from accounts.models import ChangeSecretAutomation, PushAccountAutomation, BackupAccountAutomation, \
+    CheckAccountAutomation, GatherAccountsAutomation, AutomationExecution
+from common.permissions import IsValidLicense
+from rbac.permissions import RBACPermission
+from reports.mixins import DateRangeMixin
+
+__all__ = ['AccountAutomationApi']
+
+
+class AccountAutomationApi(DateRangeMixin, APIView):
+    http_method_names = ['get']
+    rbac_perms = {
+        'GET': 'accounts.view_account',
+    }
+    permission_classes = [RBACPermission, IsValidLicense]
+
+    @property
+    def change_secret_qs(self):
+        return ChangeSecretAutomation.objects.all()
+
+    @property
+    def push_qs(self):
+        return PushAccountAutomation.objects.all()
+
+    @property
+    def backup_qs(self):
+        return BackupAccountAutomation.objects.all()
+
+    @property
+    def check_qs(self):
+        return CheckAccountAutomation.objects.all()
+
+    @property
+    def collect_qs(self):
+        return GatherAccountsAutomation.objects.all()
+
+    def get_execution_metrics(self):
+        executions = AutomationExecution.objects.filter(type__in=AutomationTypes.values)
+        filtered_queryset = self.filter_by_date_range(executions, 'date_start')
+
+        types = set()
+        data = defaultdict(lambda: defaultdict(int))
+        for t, tp in filtered_queryset.values_list('date_start', 'type'):
+            if not tp:
+                continue
+            types.add(tp)
+            date_str = str(t.date())
+            data[date_str][tp] += 1
+
+        tp_map = defaultdict(list)
+        for d in self.date_range_list:
+            tp_data = data.get(str(d), {})
+            for tp in types:
+                tp_map[tp].append(tp_data.get(tp, 0))
+
+        metrics = {}
+        for tp, values in tp_map.items():
+            _tp = AutomationTypes(tp).label
+            metrics[str(_tp)] = values
+
+        return metrics
+
+    def get(self, request, *args, **kwargs):
+        stats = {
+            'push': self.push_qs.count(),
+            'check': self.check_qs.count(),
+            'backup': self.backup_qs.count(),
+            'collect': self.collect_qs.count(),
+            'change_secret': self.change_secret_qs.count(),
+        }
+
+        payload = {
+            'automation_stats': stats,
+            'execution_metrics': {
+                'dates_metrics_date': self.dates_metrics_date,
+                'data': self.get_execution_metrics()
+            },
+        }
+        return JsonResponse(payload, status=200)
@@ -0,0 +1,21 @@
+from django.db.models import Count, F
+
+
+def group_stats(queryset, alias, key, label_map=None):
+    grouped = (
+        queryset
+        .exclude(**{f'{key}__isnull': True})
+        .values(**{alias: F(key)})
+        .annotate(total=Count('id'))
+    )
+
+    data = [
+        {
+            alias: val,
+            'total': cnt,
+            **({'label': label_map.get(val, val)} if label_map else {})
+        }
+        for val, cnt in grouped.values_list(alias, 'total')
+    ]
+
+    return data
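For context, a minimal usage sketch of the group_stats helper above (illustrative, not part of the patch). It groups a queryset by a field and returns one dict per distinct value; when a label_map is given and the alias is 'label', the mapped label overwrites the raw value, which is how the report APIs in this commit call it:

# Illustrative sketch; Account and Connectivity are the model/choices used elsewhere in this patch.
from accounts.models import Account
from assets.const import Connectivity
from reports.api.assets.base import group_stats

rows = group_stats(
    Account.objects.all(),              # queryset to group
    'label',                            # key name used for the grouped value
    'connectivity',                     # model field to group by
    label_map=Connectivity.as_dict(),   # optional value -> human label mapping
)
# rows looks roughly like: [{'label': 'OK', 'total': 12}, {'label': 'Unknown', 'total': 3}, ...]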
@@ -0,0 +1,2 @@
+from .activity import *
+from .asset import *
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+#
+from collections import defaultdict
+
+from django.db.models import Count
+from django.http.response import JsonResponse
+from rest_framework.views import APIView
+
+from assets.const import AllTypes
+from assets.models import Asset
+from common.permissions import IsValidLicense
+from common.utils import lazyproperty
+from rbac.permissions import RBACPermission
+from reports.api.assets.base import group_stats
+from reports.mixins import DateRangeMixin
+from terminal.const import LoginFrom
+from terminal.models import Session
+
+__all__ = ['AssetActivityApi']
+
+
+class AssetActivityApi(DateRangeMixin, APIView):
+    http_method_names = ['get']
+    rbac_perms = {
+        'GET': 'terminal.view_session',
+    }
+    permission_classes = [RBACPermission, IsValidLicense]
+
+    def get_asset_login_metrics(self, queryset):
+        data = defaultdict(set)
+        for t, _id in queryset.values_list('date_start', 'id'):
+            date_str = str(t.date())
+            data[date_str].add(_id)
+
+        metrics = [len(data.get(str(d), set())) for d in self.date_range_list]
+        return metrics
+
+    @lazyproperty
+    def session_qs(self):
+        return Session.objects.all()
+
+    def get(self, request, *args, **kwargs):
+        qs = self.session_qs
+        qs = self.filter_by_date_range(qs, 'date_start')
+        all_type_dict = dict(AllTypes.choices())
+
+        stats = qs.aggregate(
+            total=Count(1),
+            asset_count=Count('asset_id', distinct=True),
+            user_count=Count('user_id', distinct=True),
+        )
+
+        asset_ids = {str(_id) for _id in qs.values_list('asset_id', flat=True).distinct()}
+        assets = Asset.objects.filter(id__in=asset_ids)
+
+        asset_login_by_protocol = group_stats(
+            qs, 'label', 'protocol'
+        )
+
+        asset_login_by_from = group_stats(
+            qs, 'label', 'login_from', LoginFrom.as_dict()
+        )
+
+        asset_by_type = group_stats(
+            assets, 'label', 'platform__type', all_type_dict,
+        )
+
+        payload = {
+            'session_stats': stats,
+            'asset_login_by_type': asset_by_type,
+            'asset_login_by_from': asset_login_by_from,
+            'asset_login_by_protocol': asset_login_by_protocol,
+            'asset_login_log_metrics': {
+                'dates_metrics_date': self.dates_metrics_date,
+                'dates_metrics_total': self.get_asset_login_metrics(qs),
+            }
+        }
+        return JsonResponse(payload, status=200)
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+#
+from collections import defaultdict, OrderedDict
+
+from django.db.models import Count, Q
+from django.http import JsonResponse
+from rest_framework.views import APIView
+
+from assets.const import AllTypes, Connectivity
+from assets.models import Asset, Platform
+from common.permissions import IsValidLicense
+from common.utils import lazyproperty
+from rbac.permissions import RBACPermission
+from reports.api.assets.base import group_stats
+from reports.mixins import DateRangeMixin
+
+__all__ = ['AssetStatisticApi']
+
+
+class AssetStatisticApi(DateRangeMixin, APIView):
+    http_method_names = ['get']
+    rbac_perms = {
+        'GET': 'assets.view_asset',
+    }
+    permission_classes = [RBACPermission, IsValidLicense]
+
+    @lazyproperty
+    def base_qs(self):
+        return Asset.objects.all()
+
+    def get_added_asset_metrics(self):
+        filtered_queryset = self.filter_by_date_range(self.base_qs, 'date_created')
+
+        data = defaultdict(set)
+        for t, _id in filtered_queryset.values_list('date_created', 'id'):
+            date_str = str(t.date())
+            data[date_str].add(_id)
+
+        metrics = [len(data.get(str(d), set())) for d in self.date_range_list]
+        return metrics
+
+    def get(self, request, *args, **kwargs):
+        qs = self.base_qs
+        all_type_dict = dict(AllTypes.choices())
+
+        stats = qs.aggregate(
+            total=Count(1),
+            active=Count(1, filter=Q(is_active=True)),
+            connected=Count(1, filter=Q(connectivity=Connectivity.OK)),
+            zone=Count(1, filter=Q(zone__isnull=False)),
+            directory_services=Count(1, filter=Q(directory_services__isnull=False)),
+        )
+
+        type_category_map = {
+            d['label']: str(d['category'].label)
+            for d in AllTypes.types()
+        }
+
+        by_type = group_stats(
+            qs, 'type', 'platform__type', all_type_dict,
+        )
+
+        by_type_category = defaultdict(list)
+        for item in by_type:
+            category = type_category_map.get(item['label'], 'Other')
+            by_type_category[category].append(item)
+
+        sorted_category_assets = OrderedDict()
+        desired_order = [str(i['label']) for i in AllTypes.categories()]
+        for category in desired_order:
+            sorted_category_assets[category] = by_type_category.get(category, [])
+
+        stats.update({
+            'platform_count': Platform.objects.all().count(),
+        })
+
+        payload = {
+            'asset_stats': stats,
+            'assets_by_type_category': sorted_category_assets,
+            'added_asset_metrics': {
+                'dates_metrics_date': self.dates_metrics_date,
+                'dates_metrics_total': self.get_added_asset_metrics(),
+            }
+        }
+        return JsonResponse(payload, status=200)
@@ -0,0 +1,21 @@
+from django.db.models import Count, F
+
+
+def group_stats(queryset, alias, key, label_map=None):
+    grouped = (
+        queryset
+        .exclude(**{f'{key}__isnull': True})
+        .values(**{alias: F(key)})
+        .annotate(total=Count('id'))
+    )
+
+    data = [
+        {
+            alias: val,
+            'total': cnt,
+            **({'label': label_map.get(val, val)} if label_map else {})
+        }
+        for val, cnt in grouped.values_list(alias, 'total')
+    ]
+
+    return data
@@ -0,0 +1,9 @@
+from rest_framework.generics import ListAPIView
+from rest_framework.response import Response
+
+__all__ = ['ReportViewSet']
+
+
+class ReportViewSet(ListAPIView):
+    def list(self, request, *args, **kwargs):
+        return Response([])
@@ -0,0 +1,2 @@
+from .change_password import *
+from .user import *
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+#
+from collections import defaultdict
+
+from django.db.models import Count
+from django.http.response import JsonResponse
+from rest_framework.views import APIView
+
+from audits.models import PasswordChangeLog
+from common.permissions import IsValidLicense
+from common.utils import lazyproperty, get_ip_city, get_logger
+from rbac.permissions import RBACPermission
+from reports.mixins import DateRangeMixin
+
+__all__ = ['UserChangeSecretApi']
+
+logger = get_logger(__file__)
+
+
+class UserChangeSecretApi(DateRangeMixin, APIView):
+    http_method_names = ['get']
+    rbac_perms = {
+        'GET': 'audits.view_passwordchangelog',
+    }
+    permission_classes = [RBACPermission, IsValidLicense]
+
+    @staticmethod
+    def get_change_password_region_distribution(queryset):
+        unique_ips = queryset.values_list('remote_addr', flat=True).distinct()
+        data = defaultdict(int)
+        for ip in unique_ips:
+            try:
+                city = str(get_ip_city(ip))
+                if not city:
+                    continue
+                data[city] += 1
+            except Exception:
+                logger.debug(f"Failed to get city for IP {ip}, skipping", exc_info=True)
+                continue
+
+        return [{'name': k, 'value': v} for k, v in data.items()]
+
+    def get_change_password_metrics(self, queryset):
+        filtered_queryset = self.filter_by_date_range(queryset, 'datetime')
+
+        data = defaultdict(set)
+        for t, username in filtered_queryset.values_list('datetime', 'user'):
+            date_str = str(t.date())
+            data[date_str].add(username)
+
+        metrics = [len(data.get(str(d), set())) for d in self.date_range_list]
+        return metrics
+
+    @lazyproperty
+    def change_password_queryset(self):
+        queryset = PasswordChangeLog.objects.all()
+        return PasswordChangeLog.filter_queryset_by_org(queryset)
+
+    def get(self, request, *args, **kwargs):
+        data = {}
+
+        qs = self.filter_by_date_range(self.change_password_queryset, 'datetime')
+
+        total = qs.count()
+        change_password_top10_users = qs.values(
+            'user').annotate(count=Count('id')).order_by('-count')[:10]
+
+        change_password_top10_change_bys = qs.values(
+            'change_by').annotate(count=Count('id')).order_by('-count')[:10]
+
+        data['total_count_change_password'] = {
+            'total': total,
+            'user_total': qs.values('user').distinct().count(),
+            'change_by_total': qs.values('change_by').distinct().count(),
+        }
+
+        data['change_password_top10_users'] = list(change_password_top10_users)
+        data['change_password_top10_change_bys'] = list(change_password_top10_change_bys)
+
+        data['user_change_password_metrics'] = {
+            'dates_metrics_date': self.dates_metrics_date,
+            'dates_metrics_total': self.get_change_password_metrics(qs),
+        }
+
+        data['change_password_region_distribution'] = self.get_change_password_region_distribution(qs)
+        return JsonResponse(data, status=200)
@@ -0,0 +1,129 @@
+# -*- coding: utf-8 -*-
+#
+from collections import defaultdict
+
+from django.db.models import Count, Q
+from django.http.response import JsonResponse
+from rest_framework.views import APIView
+
+from audits.const import LoginStatusChoices
+from audits.models import UserLoginLog
+from common.permissions import IsValidLicense
+from common.utils import lazyproperty
+from rbac.permissions import RBACPermission
+from reports.mixins import DateRangeMixin
+
+__all__ = ['UserReportApi']
+
+from users.models import User
+from users.models.user import Source
+
+
+class UserReportApi(DateRangeMixin, APIView):
+    http_method_names = ['get']
+    rbac_perms = {
+        'GET': 'users.view_users',
+    }
+    permission_classes = [RBACPermission, IsValidLicense]
+
+    def get_user_login_metrics(self, queryset):
+        filtered_queryset = self.filter_by_date_range(queryset, 'datetime')
+
+        data = defaultdict(set)
+        for t, username in filtered_queryset.values_list('datetime', 'username'):
+            date_str = str(t.date())
+            data[date_str].add(username)
+
+        metrics = [len(data.get(str(d), set())) for d in self.date_range_list]
+        return metrics
+
+    def get_user_login_method_metrics(self):
+        filtered_queryset = self.filter_by_date_range(self.user_login_log_queryset, 'datetime')
+
+        backends = set()
+        data = defaultdict(lambda: defaultdict(set))
+        for t, username, backend in filtered_queryset.values_list('datetime', 'username', 'backend'):
+            backends.add(backend)
+            date_str = str(t.date())
+            data[date_str][backend].add(username)
+        metrics = defaultdict(list)
+        for t in self.date_range_list:
+            date_str = str(t)
+            for backend in backends:
+                username = data.get(date_str) if data.get(date_str) else {backend: set()}
+                metrics[backend].append(len(username.get(backend, set())))
+        return metrics
+
+    def get_user_login_region_distribution(self):
+        filtered_queryset = self.filter_by_date_range(self.user_login_log_queryset, 'datetime')
+
+        data = filtered_queryset.values('city').annotate(
+            user_count=Count('username', distinct=True)
+        ).order_by('-user_count')
+        metrics = [{'name': d['city'], 'value': d['user_count']} for d in data]
+        return metrics
+
+    def get_user_login_time_metrics(self):
+        time_buckets = {
+            '00:00-06:00': (0, 6),
+            '06:00-12:00': (6, 12),
+            '12:00-18:00': (12, 18),
+            '18:00-24:00': (18, 24),
+        }
+        filtered_queryset = self.filter_by_date_range(self.user_login_log_queryset, 'datetime').all()
+        metrics = {bucket: 0 for bucket in time_buckets.keys()}
+        for date in filtered_queryset:
+            hour = date.datetime.hour
+            for bucket, (start, end) in time_buckets.items():
+                if start <= hour < end:
+                    metrics[bucket] = metrics.get(bucket, 0) + 1
+        return metrics
+
+    @lazyproperty
+    def user_login_log_queryset(self):
+        queryset = UserLoginLog.objects.filter(status=LoginStatusChoices.success)
+        return UserLoginLog.filter_queryset_by_org(queryset)
+
+    @lazyproperty
+    def user_login_failed_queryset(self):
+        queryset = UserLoginLog.objects.filter(status=LoginStatusChoices.failed)
+        return UserLoginLog.filter_queryset_by_org(queryset)
+
+    @lazyproperty
+    def user_qs(self):
+        return User.get_org_users()
+
+    def get(self, request, *args, **kwargs):
+        data = {}
+
+        user_stats = self.user_qs.aggregate(
+            total=Count(1),
+            first_login=Count(1, filter=Q(is_first_login=True)),
+            need_update_password=Count(1, filter=Q(need_update_password=True)),
+            face_vector=Count(1, filter=Q(face_vector__isnull=False)),
+            not_enabled_mfa=Count(1, filter=Q(mfa_level=0)),
+        )
+
+        user_stats['valid'] = sum(1 for u in self.user_qs if u.is_valid)
+        data['user_stats'] = user_stats
+
+        source_map = Source.as_dict()
+        user_by_source = defaultdict(int)
+        for source in self.user_qs.values_list('source', flat=True):
+            k = source_map.get(source, source)
+            user_by_source[str(k)] += 1
+
+        data['user_by_source'] = [{'name': k, 'value': v} for k, v in user_by_source.items()]
+
+        data['user_login_log_metrics'] = {
+            'dates_metrics_date': self.dates_metrics_date,
+            'dates_metrics_success_total': self.get_user_login_metrics(self.user_login_log_queryset),
+            'dates_metrics_failure_total': self.get_user_login_metrics(self.user_login_failed_queryset),
+        }
+        data['user_login_method_metrics'] = {
+            'dates_metrics_date': self.dates_metrics_date,
+            'dates_metrics_total': self.get_user_login_method_metrics(),
+        }
+        data['user_login_region_distribution'] = self.get_user_login_region_distribution()
+        data['user_login_time_metrics'] = self.get_user_login_time_metrics()
+        return JsonResponse(data, status=200)
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class ReportsConfig(AppConfig):
+    default_auto_field = "django.db.models.BigAutoField"
+    name = "reports"
@@ -0,0 +1,46 @@
+from django.utils import timezone
+from rest_framework.request import Request
+
+from common.utils import lazyproperty
+from common.utils.timezone import local_zero_hour, local_now
+
+
+class DateRangeMixin:
+    request: Request
+    days_param = 'days'
+    default_days = 1
+
+    @lazyproperty
+    def days(self) -> int:
+        raw = self.request.query_params.get(self.days_param, self.default_days)
+        try:
+            return int(raw)
+        except (ValueError, TypeError):
+            return self.default_days
+
+    @property
+    def start_datetime(self):
+        if self.days == 1:
+            return local_zero_hour()
+        return local_now() - timezone.timedelta(days=self.days)
+
+    @property
+    def date_range_bounds(self) -> tuple:
+        start = self.start_datetime.date()
+        end = (local_now() + timezone.timedelta(days=1)).date()
+        return start, end
+
+    @lazyproperty
+    def date_range_list(self) -> list:
+        return [
+            (local_now() - timezone.timedelta(days=i)).date()
+            for i in range(self.days - 1, -1, -1)
+        ]
+
+    def filter_by_date_range(self, queryset, field_name: str):
+        start, end = self.date_range_bounds
+        return queryset.filter(**{f'{field_name}__range': (start, end)})
+
+    @lazyproperty
+    def dates_metrics_date(self):
+        return [date.strftime('%m-%d') for date in self.date_range_list] or ['0']
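A minimal sketch (illustrative, not part of the patch) of how a report view combines the mixin's helpers, as the report APIs in this commit do: the ?days=N query parameter sets the window, filter_by_date_range() narrows the queryset, and dates_metrics_date labels the chart's x-axis. LoginCountApi here is a hypothetical name:

# Hypothetical example view; the real report APIs in this patch follow the same shape.
from django.http import JsonResponse
from rest_framework.views import APIView

from audits.models import UserLoginLog
from reports.mixins import DateRangeMixin


class LoginCountApi(DateRangeMixin, APIView):
    def get(self, request, *args, **kwargs):
        qs = self.filter_by_date_range(UserLoginLog.objects.all(), 'datetime')
        per_day = {str(d): 0 for d in self.date_range_list}
        for dt in qs.values_list('datetime', flat=True):
            per_day[str(dt.date())] = per_day.get(str(dt.date()), 0) + 1
        return JsonResponse({
            'dates_metrics_date': self.dates_metrics_date,                      # e.g. ['08-01', '08-02', ...]
            'dates_metrics_total': [per_day[str(d)] for d in self.date_range_list],
        })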
@@ -0,0 +1,3 @@
+from django.db import models
+
+# Create your models here.
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
@@ -0,0 +1,15 @@
+from django.urls import path
+
+from reports import api
+
+app_name = 'reports'
+
+urlpatterns = [
+    path('reports/', api.ReportViewSet.as_view(), name='report-list'),
+    path('reports/users/', api.UserReportApi.as_view(), name='user-list'),
+    path('reports/user-change-password/', api.UserChangeSecretApi.as_view(), name='user-change-password'),
+    path('reports/asset-statistic/', api.AssetStatisticApi.as_view(), name='asset-statistic'),
+    path('reports/asset-activity/', api.AssetActivityApi.as_view(), name='asset-activity'),
+    path('reports/account-statistic/', api.AccountStatisticApi.as_view(), name='account-statistic'),
+    path('reports/account-automation/', api.AccountAutomationApi.as_view(), name='account-automation'),
+]
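For context (illustrative, not part of the patch): since resource_api mounts this file under 'reports/', the chart data can be fetched with a plain authenticated GET. The base URL, API prefix, and token below are assumptions about the deployment, not values taken from the patch:

# Hypothetical client call; the /api/v1/ prefix and auth scheme depend on how the API is deployed.
import requests

base = 'https://jumpserver.example.com/api/v1/reports'
resp = requests.get(
    f'{base}/reports/users/',
    params={'days': 7},                               # handled by DateRangeMixin
    headers={'Authorization': 'Token <api-token>'},   # placeholder credential
)
print(resp.json().get('user_login_log_metrics'))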
@@ -0,0 +1,15 @@
+# ~*~ coding: utf-8 ~*~
+from __future__ import unicode_literals
+from django.urls import path
+
+from .. import views
+
+__all__ = ["urlpatterns"]
+
+app_name = "reports"
+
+urlpatterns = [
+    # Resource Task url
+    path('export-pdf/', views.ExportPdfView.as_view(), name='export-pdf'),
+    path('send-mail/', views.SendMailView.as_view(), name='send-mail'),
+]
@@ -0,0 +1,149 @@
+import base64
+import io
+import urllib.parse
+from io import BytesIO
+from urllib.parse import urlparse
+
+from django.conf import settings
+from django.core.mail import EmailMultiAlternatives
+from django.http import FileResponse, HttpResponseBadRequest, JsonResponse
+from django.utils import timezone
+from django.utils.decorators import method_decorator
+from django.views import View
+from django.views.decorators.csrf import csrf_exempt
+from pdf2image import convert_from_bytes
+from playwright.sync_api import sync_playwright
+
+charts_map = {
+    "UserLoginActivity": {
+        "title": "用户登录报告",
+        "path": "/ui/#/reports/users/user-activity"
+    },
+    "UserPasswordChange": {
+        "title": "用户改密报告",
+        "path": "/ui/#/reports/users/change-password"
+    },
+    "AssetStatistics": {
+        "title": "资产统计报告",
+        "path": "/ui/#/reports/assets/asset-statistics"
+    },
+    "AssetReport": {
+        "title": "资产活动报告",
+        "path": "/ui/#/reports/assets/asset-activity"
+    },
+    "AccountStatistics": {
+        "title": "账号统计报告",
+        "path": "/ui/#/reports/accounts/account-statistics"
+    },
+    "AccountAutomationReport": {
+        "title": "账号自动化报告",
+        "path": "/ui/#/reports/accounts/account-automation"
+    }
+}
+
+
+def export_chart_to_pdf(chart_name, sessionid, request=None):
+    chart_info = charts_map.get(chart_name)
+    if not chart_info:
+        return None, None
+
+    if request:
+        url = request.build_absolute_uri(urllib.parse.unquote(chart_info['path']))
+    else:
+        url = urllib.parse.unquote(chart_info['path'])
+
+    if settings.DEBUG_DEV:
+        url = url.replace(":8080", ":9528")
+    print("Url: ", url)
+
+    with sync_playwright() as p:
+        browser = p.chromium.launch(headless=True)
+        context = browser.new_context(viewport={"width": 1000, "height": 800})
+        # Set the sessionid cookie
+        parsed_url = urlparse(url)
+        context.add_cookies([
+            {
+                'name': settings.SESSION_COOKIE_NAME,
+                'value': sessionid,
+                'domain': parsed_url.hostname,
+                'path': '/',
+                'httpOnly': True,
+                'secure': False,  # can be set to True when served over https
+            }
+        ])
+        page = context.new_page()
+        try:
+            page.goto(url, wait_until='networkidle')
+            pdf_bytes = page.pdf(format="A4", landscape=True,
+                                 margin={"top": "35px", "bottom": "30px", "left": "20px", "right": "20px"})
+        except Exception as e:
+            print(f'Playwright error: {e}')
+            pdf_bytes = None
+        finally:
+            browser.close()
+    return pdf_bytes, chart_info['title']
+
+
+@method_decorator(csrf_exempt, name='dispatch')
+class ExportPdfView(View):
+    def get(self, request):
+        chart_name = request.GET.get('chart')
+        return self._handle_export(request, chart_name)
+
+    def post(self, request):
+        chart_name = request.POST.get('chart')
+        return self._handle_export(request, chart_name)
+
+    def _handle_export(self, request, chart_name):
+        if not chart_name:
+            return HttpResponseBadRequest('Missing chart parameter')
+        sessionid = request.COOKIES.get(settings.SESSION_COOKIE_NAME)
+        if not sessionid:
+            return HttpResponseBadRequest('No sessionid found in cookies')
+
+        pdf_bytes, title = export_chart_to_pdf(chart_name, sessionid, request=request)
+        if not pdf_bytes:
+            return HttpResponseBadRequest('Failed to generate PDF')
+        filename = f"{title}-{timezone.now().strftime('%Y%m%d%H%M%S')}.pdf"
+        response = FileResponse(io.BytesIO(pdf_bytes), as_attachment=True, filename=filename,
+                                content_type='application/pdf')
+        return response
+
+
+class SendMailView(View):
+    def get(self, request):
+        chart_name = request.GET.get('chart')
+        email = "ibuler@qq.com"
+        if not chart_name or not email:
+            return HttpResponseBadRequest('Missing chart or email parameter')
+        sessionid = request.COOKIES.get(settings.SESSION_COOKIE_NAME)
+        if not sessionid:
+            return HttpResponseBadRequest('No sessionid found in cookies')
+
+        # 1. Generate the PDF
+        pdf_bytes, title = export_chart_to_pdf(chart_name, sessionid, request=request)
+        if not pdf_bytes:
+            return HttpResponseBadRequest('Failed to generate PDF')
+
+        # 2. Convert the PDF to images
+        images = convert_from_bytes(pdf_bytes, dpi=200)
+        # 3. Encode the images as base64
+        img_tags = []
+        for img in images:
+            buffer = BytesIO()
+            img.save(buffer, format="PNG")
+            encoded = base64.b64encode(buffer.getvalue()).decode("utf-8")
+            img_tags.append(f'<img src="data:image/png;base64,{encoded}" style="width:100%; max-width:800px;" />')
+        html_content = "<br/>".join(img_tags)
+
+        # 4. Send the email
+        subject = f"{title} 报表"
+        from_email = settings.EMAIL_HOST_USER
+        to = [email]
+        msg = EmailMultiAlternatives(subject, '', from_email, to)
+        msg.attach_alternative(html_content, "text/html")
+        filename = f"{title}-{timezone.now().strftime('%Y%m%d%H%M%S')}.pdf"
+        msg.attach(filename, pdf_bytes, "application/pdf")
+        msg.send()
+
+        return JsonResponse({"message": "邮件发送成功"})
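For context (illustrative, not part of the patch): ExportPdfView only needs the chart name from charts_map plus the caller's normal session cookie, since export_chart_to_pdf forwards that cookie to the headless browser. The host, mount prefix, and cookie name below are assumptions about the deployment:

# Hypothetical call; the URL prefix for app_view_patterns and the session cookie name vary by install.
import requests

resp = requests.get(
    'https://jumpserver.example.com/core/reports/export-pdf/',
    params={'chart': 'AssetStatistics'},          # a key from charts_map
    cookies={'sessionid': '<valid-session-id>'},  # must match settings.SESSION_COOKIE_NAME
)
with open('asset-statistics.pdf', 'wb') as f:
    f.write(resp.content)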
@@ -106,3 +106,14 @@ class SessionErrorReason(TextChoices):
     replay_upload_failed = 'replay_upload_failed', _('Replay upload failed')
     replay_convert_failed = 'replay_convert_failed', _('Replay convert failed')
     replay_unsupported = 'replay_unsupported', _('Replay unsupported')
+
+
+class LoginFrom(TextChoices):
+    ST = 'ST', 'SSH Terminal'
+    RT = 'RT', 'RDP Terminal'
+    WT = 'WT', 'Web Terminal'
+    DT = 'DT', 'DB Terminal'
+
+    @classmethod
+    def as_dict(cls):
+        return {choice.value: choice.label for choice in cls}
@@ -18,18 +18,11 @@ from common.const.signals import OP_LOG_SKIP_SIGNAL
 from common.utils import get_object_or_none, lazyproperty
 from orgs.mixins.models import OrgModelMixin
 from terminal.backends import get_multi_command_storage
-from terminal.const import SessionType, TerminalType
+from terminal.const import SessionType, TerminalType, LoginFrom
 from users.models import User


 class Session(OrgModelMixin):
-    class LOGIN_FROM(models.TextChoices):
-        ST = 'ST', 'SSH Terminal'
-        RT = 'RT', 'RDP Terminal'
-        WT = 'WT', 'Web Terminal'
-        DT = 'DT', 'DB Terminal'
-        VT = 'VT', 'VNC Terminal'
-
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
     user = models.CharField(max_length=128, verbose_name=_("User"), db_index=True)
     user_id = models.CharField(blank=True, default='', max_length=36, db_index=True)

@@ -38,7 +31,7 @@ class Session(OrgModelMixin):
     account = models.CharField(max_length=128, verbose_name=_("Account"), db_index=True)
     account_id = models.CharField(max_length=128, verbose_name=_("Account ID"), db_index=True)
     protocol = models.CharField(default='ssh', max_length=16, db_index=True)
-    login_from = models.CharField(max_length=2, choices=LOGIN_FROM.choices, default="ST", verbose_name=_("Login from"))
+    login_from = models.CharField(max_length=2, choices=LoginFrom.choices, default="ST", verbose_name=_("Login from"))
     type = models.CharField(max_length=16, default='normal', db_index=True)
     remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
     is_success = models.BooleanField(default=True, db_index=True)
@@ -2,15 +2,15 @@ import datetime

 from django.db import models
 from django.utils import timezone
-from django.utils.translation import gettext_lazy as _
 from django.utils.functional import cached_property
+from django.utils.translation import gettext_lazy as _

 from common.db.models import JMSBaseModel
 from common.utils import is_uuid
 from orgs.mixins.models import OrgModelMixin
 from orgs.utils import tmp_to_root_org
+from terminal.const import LoginFrom
 from users.models import User
-from .session import Session

 __all__ = ['SessionSharing', 'SessionJoinRecord']

@@ -89,8 +89,6 @@ class SessionSharing(JMSBaseModel, OrgModelMixin):


 class SessionJoinRecord(JMSBaseModel, OrgModelMixin):
-    LOGIN_FROM = Session.LOGIN_FROM
-
     session = models.ForeignKey(
         'terminal.Session', on_delete=models.CASCADE, verbose_name=_('Session')
     )

@@ -114,7 +112,7 @@ class SessionJoinRecord(JMSBaseModel, OrgModelMixin):
         db_index=True
     )
     login_from = models.CharField(
-        max_length=2, choices=LOGIN_FROM.choices, default="WT",
+        max_length=2, choices=LoginFrom.choices, default="WT",
         verbose_name=_("Login from")
     )
     is_success = models.BooleanField(
@@ -25,6 +25,10 @@ class Source(models.TextChoices):
     slack = "slack", _("Slack")
     custom = "custom", "Custom"

+    @classmethod
+    def as_dict(cls):
+        return {choice.value: choice.label for choice in cls}
+

 class SourceMixin:
     source: str
@@ -91,7 +91,7 @@ dependencies = [
     'django-proxy==1.2.2',
     'python-daemon==3.0.1',
     'eventlet==0.33.3',
-    'greenlet==3.0.1',
+    'greenlet==3.1.1',
     'gunicorn==23.0.0',
     'celery==5.3.1',
     'flower==2.0.1',

@@ -152,6 +152,8 @@ dependencies = [
     'botocore==1.31.9',
     's3transfer==0.6.1',
     'xmlsec==1.3.14',
+    'playwright==1.53.0',
+    'pdf2image==1.17.0'
 ]

 [project.urls]
@@ -5,16 +5,13 @@ PROJECT_DIR=$(dirname "$BASE_DIR")
 echo "1. 安装依赖"
 brew install libtiff libjpeg webp little-cms2 openssl gettext git \
   git-lfs libxml2 libxmlsec1 pkg-config postgresql freetds openssl \
-  libffi freerdp
+  libffi freerdp poppler
 pip install daphne==4.0.0 channels channels-redis

 echo "2. 下载 IP 数据库"
 ip_db_path="${PROJECT_DIR}/apps/common/utils/geoip/GeoLite2-City.mmdb"
 wget "https://download.jumpserver.org/files/GeoLite2-City.mmdb" -O "${ip_db_path}"

-echo "3. 安装依赖的插件"
-git lfs install
-
 if ! uname -a | grep 'ARM64' &> /dev/null;then
   exit 0
 fi