Mirror of https://github.com/jumpserver/jumpserver

Commit 211f560400: Merge branch 'v3' of github.com:jumpserver/jumpserver into v3
@@ -7,7 +7,7 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('authentication', '0014_auto_20221122_2152'),
+        ('authentication', '0015_alter_connectiontoken_login'),
     ]
 
     operations = [
@@ -6,7 +6,6 @@ from .hands import *
 
 class Services(TextChoices):
     gunicorn = 'gunicorn', 'gunicorn'
-    daphne = 'daphne', 'daphne'
     celery_ansible = 'celery_ansible', 'celery_ansible'
     celery_default = 'celery_default', 'celery_default'
     beat = 'beat', 'beat'
@@ -22,7 +21,6 @@ class Services(TextChoices):
         from . import services
         services_map = {
             cls.gunicorn.value: services.GunicornService,
-            cls.daphne: services.DaphneService,
             cls.flower: services.FlowerService,
             cls.celery_default: services.CeleryDefaultService,
             cls.celery_ansible: services.CeleryAnsibleService,
@@ -30,13 +28,9 @@ class Services(TextChoices):
         }
         return services_map.get(name)
 
-    @classmethod
-    def ws_services(cls):
-        return [cls.daphne]
-
     @classmethod
     def web_services(cls):
-        return [cls.gunicorn, cls.daphne, cls.flower]
+        return [cls.gunicorn, cls.flower]
 
     @classmethod
     def celery_services(cls):
@@ -1,6 +1,5 @@
 from .beat import *
 from .celery_ansible import *
 from .celery_default import *
-from .daphne import *
 from .flower import *
 from .gunicorn import *
@@ -1,25 +0,0 @@
-from ..hands import *
-from .base import BaseService
-
-__all__ = ['DaphneService']
-
-
-class DaphneService(BaseService):
-
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-
-    @property
-    def cmd(self):
-        print("\n- Start Daphne ASGI WS Server")
-
-        cmd = [
-            'daphne', 'jumpserver.asgi:application',
-            '-b', HTTP_HOST,
-            '-p', str(WS_PORT),
-        ]
-        return cmd
-
-    @property
-    def cwd(self):
-        return APPS_DIR
@@ -17,9 +17,9 @@ class GunicornService(BaseService):
         log_format = '%(h)s %(t)s %(L)ss "%(r)s" %(s)s %(b)s '
         bind = f'{HTTP_HOST}:{HTTP_PORT}'
         cmd = [
-            'gunicorn', 'jumpserver.wsgi',
+            'gunicorn', 'jumpserver.asgi:application',
             '-b', bind,
-            '-k', 'gthread',
+            '-k', 'uvicorn.workers.UvicornWorker',
             '--threads', '10',
             '-w', str(self.worker),
             '--max-requests', '4096',
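
With DaphneService removed, gunicorn now runs the ASGI application through uvicorn workers, so a single server process tree handles both HTTP and WebSocket traffic. A minimal sketch of the resulting invocation, with illustrative values for HTTP_HOST, HTTP_PORT and the worker count (the real values come from the service configuration, as in the hunk above):

import subprocess

HTTP_HOST = '127.0.0.1'   # illustrative; supplied by the service config in practice
HTTP_PORT = 8080          # illustrative
workers = 4               # illustrative

cmd = [
    'gunicorn', 'jumpserver.asgi:application',
    '-b', f'{HTTP_HOST}:{HTTP_PORT}',
    '-k', 'uvicorn.workers.UvicornWorker',  # ASGI worker: serves HTTP and WebSocket
    '--threads', '10',
    '-w', str(workers),
    '--max-requests', '4096',
]
subprocess.run(cmd)  # the real GunicornService wraps this in its own process management
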
@@ -1,22 +1,21 @@
-import datetime
-
 import pytz
+from datetime import datetime, timedelta, timezone
 from django.utils import timezone as dj_timezone
 from rest_framework.fields import DateTimeField
 
-max = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
+max = datetime.max.replace(tzinfo=timezone.utc)
 
 
-def astimezone(dt: datetime.datetime, tzinfo: pytz.tzinfo.DstTzInfo):
+def astimezone(dt: datetime, tzinfo: pytz.tzinfo.DstTzInfo):
     assert dj_timezone.is_aware(dt)
     return tzinfo.normalize(dt.astimezone(tzinfo))
 
 
-def as_china_cst(dt: datetime.datetime):
+def as_china_cst(dt: datetime):
     return astimezone(dt, pytz.timezone('Asia/Shanghai'))
 
 
-def as_current_tz(dt: datetime.datetime):
+def as_current_tz(dt: datetime):
     return astimezone(dt, dj_timezone.get_current_timezone())
 
 
@@ -36,6 +35,15 @@ def local_now_date_display(fmt='%Y-%m-%d'):
     return local_now().strftime(fmt)
 
 
+def local_zero_hour(fmt='%Y-%m-%d'):
+    return datetime.strptime(local_now().strftime(fmt), fmt)
+
+
+def local_monday():
+    zero_hour_time = local_zero_hour()
+    return zero_hour_time - timedelta(zero_hour_time.weekday())
+
+
 _rest_dt_field = DateTimeField()
 dt_parser = _rest_dt_field.to_internal_value
 dt_formatter = _rest_dt_field.to_representation
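
The two new helpers anchor statistics windows to local midnight and to the start of the current week. A usage sketch, assuming a configured Django settings module (local_now() reads the active timezone; dates in the comments are illustrative):

from datetime import timedelta

from common.utils.timezone import local_now, local_monday, local_zero_hour

today_start = local_zero_hour()   # e.g. 2022-11-30 00:00:00, naive local time
week_start = local_monday()       # e.g. 2022-11-28 00:00:00, Monday of that week
last_week = local_now() - timedelta(days=7)

# Typical lower bounds for "today" / "this week" statistics queries:
# Session.objects.filter(date_start__gte=today_start)
# User.objects.filter(date_joined__gte=week_start)
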
@@ -3,45 +3,51 @@ import time
 from django.core.cache import cache
 from django.utils import timezone
 from django.utils.timesince import timesince
-from django.db.models import Count, Max
+from django.db.models import Count, Max, F
 from django.http.response import JsonResponse, HttpResponse
 from rest_framework.views import APIView
 from rest_framework.permissions import AllowAny
-from collections import Counter
+from rest_framework.request import Request
 from rest_framework.response import Response
 
 from users.models import User
 from assets.models import Asset
+from assets.const import AllTypes
 from terminal.models import Session
 from terminal.utils import ComponentsPrometheusMetricsUtil
 from orgs.utils import current_org
 from common.utils import lazyproperty
+from common.utils.timezone import local_now, local_zero_hour
 from orgs.caches import OrgResourceStatisticsCache
 
 
 __all__ = ['IndexApi']
 
 
 class DatesLoginMetricMixin:
+    request: Request
+
     @lazyproperty
     def days(self):
         query_params = self.request.query_params
-        if query_params.get('monthly'):
-            return 30
-        return 7
+        # monthly
+        count = query_params.get('days')
+        return count if count else 0
 
     @lazyproperty
     def sessions_queryset(self):
-        days = timezone.now() - timezone.timedelta(days=self.days)
-        sessions_queryset = Session.objects.filter(date_start__gt=days)
+        days = self.days
+        if days == 0:
+            t = local_zero_hour()
+        else:
+            t = local_now() - timezone.timedelta(days=days)
+        sessions_queryset = Session.objects.filter(date_start__gte=t)
         return sessions_queryset
 
     @lazyproperty
     def session_dates_list(self):
-        now = timezone.now()
+        now = local_now()
         dates = [(now - timezone.timedelta(days=i)).date() for i in range(self.days)]
         dates.reverse()
-        # dates = self.sessions_queryset.dates('date_start', 'day')
         return dates
 
     def get_dates_metrics_date(self):
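
After this change the dashboard window is driven by a days query parameter instead of the fixed 7/30-day switch: with no parameter the queryset starts at local midnight (today only), otherwise it reaches back the requested number of days. A standalone sketch of the same selection logic (hypothetical helper, shown for illustration only):

from django.utils import timezone

from common.utils.timezone import local_now, local_zero_hour
from terminal.models import Session


def sessions_since(days: int):
    # days == 0 -> sessions started today; otherwise the last `days` days
    if days == 0:
        start = local_zero_hour()
    else:
        start = local_now() - timezone.timedelta(days=days)
    return Session.objects.filter(date_start__gte=start)

# A request carrying ?days=7 ends up filtering roughly like sessions_since(7).
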
@@ -63,7 +69,7 @@ class DatesLoginMetricMixin:
 
     def __set_data_to_cache(self, date, tp, count):
         cache_key = self.get_cache_key(date, tp)
-        cache.set(cache_key, count, 3600*24*7)
+        cache.set(cache_key, count, 3600 * 24 * 7)
 
     @staticmethod
     def get_date_start_2_end(d):
@@ -162,40 +168,45 @@ class DatesLoginMetricMixin:
     def dates_total_count_disabled_assets(self):
         return Asset.objects.filter(is_active=False).count()
 
-    # The following came from the weekly (week) stats
-    def get_dates_login_times_top5_users(self):
-        users = self.sessions_queryset.values_list('user_id', flat=True)
-        users = [
-            {'user': user, 'total': total}
-            for user, total in Counter(users).most_common(5)
-        ]
-        return users
-
     def get_dates_total_count_login_users(self):
         return len(set(self.sessions_queryset.values_list('user_id', flat=True)))
 
     def get_dates_total_count_login_times(self):
         return self.sessions_queryset.count()
 
-    def get_dates_login_times_top10_assets(self):
+    @lazyproperty
+    def get_type_to_assets(self):
+        result = Asset.objects.annotate(type=F('platform__type')). \
+            values('type').order_by('type').annotate(total=Count(1))
+        all_types_dict = dict(AllTypes.choices())
+        result = list(result)
+        for i in result:
+            tp = i['type']
+            i['label'] = all_types_dict[tp]
+        return result
+
+    def get_dates_login_times_assets(self):
         assets = self.sessions_queryset.values("asset") \
             .annotate(total=Count("asset")) \
-            .annotate(last=Max("date_start")).order_by("-total")[:10]
+            .annotate(last=Max("date_start")).order_by("-total")
+        assets = assets[:10]
         for asset in assets:
             asset['last'] = str(asset['last'])
         return list(assets)
 
-    def get_dates_login_times_top10_users(self):
+    def get_dates_login_times_users(self):
         users = self.sessions_queryset.values("user_id") \
             .annotate(total=Count("user_id")) \
             .annotate(user=Max('user')) \
-            .annotate(last=Max("date_start")).order_by("-total")[:10]
+            .annotate(last=Max("date_start")).order_by("-total")
+        users = users[:10]
         for user in users:
             user['last'] = str(user['last'])
         return list(users)
 
-    def get_dates_login_record_top10_sessions(self):
-        sessions = self.sessions_queryset.order_by('-date_start')[:10]
+    def get_dates_login_record_sessions(self):
+        sessions = self.sessions_queryset.order_by('-date_start')
+        sessions = sessions[:10]
         for session in sessions:
             session.avatar_url = User.get_avatar_url("")
         sessions = [
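
The new get_type_to_assets property groups assets by platform type and attaches a display label taken from AllTypes. The cached value has roughly the shape sketched below (type keys, labels and counts are made up for illustration):

type_to_assets_amount = [
    {'type': 'linux', 'total': 42, 'label': 'Linux'},
    {'type': 'windows', 'total': 17, 'label': 'Windows'},
    {'type': 'mysql', 'total': 5, 'label': 'MySQL'},
]
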
@@ -229,11 +240,13 @@ class IndexApi(DatesLoginMetricMixin, APIView):
         if _all or query_params.get('total_count') or query_params.get('total_count_users'):
             data.update({
                 'total_count_users': caches.users_amount,
+                'total_count_users_this_week': caches.new_users_amount_this_week,
             })
 
         if _all or query_params.get('total_count') or query_params.get('total_count_assets'):
             data.update({
                 'total_count_assets': caches.assets_amount,
+                'total_count_assets_this_week': caches.new_assets_amount_this_week,
             })
 
         if _all or query_params.get('total_count') or query_params.get('total_count_online_users'):
@@ -246,6 +259,23 @@ class IndexApi(DatesLoginMetricMixin, APIView):
                 'total_count_online_sessions': caches.total_count_online_sessions,
             })
 
+        if _all or query_params.get('total_count') or query_params.get('total_count_today_failed_sessions'):
+            data.update({
+                'total_count_today_failed_sessions': caches.total_count_today_failed_sessions,
+            })
+        if _all or query_params.get('total_count') or query_params.get('total_count_today_login_users'):
+            data.update({
+                'total_count_today_login_users': caches.total_count_today_login_users,
+            })
+        if _all or query_params.get('total_count') or query_params.get('total_count_today_active_assets'):
+            data.update({
+                'total_count_today_active_assets': caches.total_count_today_active_assets,
+            })
+        if _all or query_params.get('total_count') or query_params.get('total_count_type_to_assets_amount'):
+            data.update({
+                'total_count_type_to_assets_amount': self.get_type_to_assets,
+            })
+
         if _all or query_params.get('dates_metrics'):
             data.update({
                 'dates_metrics_date': self.get_dates_metrics_date(),
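
These branches surface the new daily counters next to the existing totals whenever total_count data is requested. An illustrative fragment of the resulting payload (keys come from the code above, values are made up):

data = {
    'total_count_today_failed_sessions': 1,
    'total_count_today_login_users': 12,
    'total_count_today_active_assets': 7,
    'total_count_type_to_assets_amount': [...],  # see get_type_to_assets above
}
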
@@ -274,24 +304,19 @@ class IndexApi(DatesLoginMetricMixin, APIView):
                 'dates_total_count_login_times': self.get_dates_total_count_login_times(),
             })
 
-        if _all or query_params.get('dates_login_times_top5_users'):
-            data.update({
-                'dates_login_times_top5_users': self.get_dates_login_times_top5_users(),
-            })
-
         if _all or query_params.get('dates_login_times_top10_assets'):
             data.update({
-                'dates_login_times_top10_assets': self.get_dates_login_times_top10_assets(),
+                'dates_login_times_top10_assets': self.get_dates_login_times_assets(),
             })
 
         if _all or query_params.get('dates_login_times_top10_users'):
             data.update({
-                'dates_login_times_top10_users': self.get_dates_login_times_top10_users(),
+                'dates_login_times_top10_users': self.get_dates_login_times_users(),
             })
 
         if _all or query_params.get('dates_login_record_top10_sessions'):
             data.update({
-                'dates_login_record_top10_sessions': self.get_dates_login_record_top10_sessions()
+                'dates_login_record_top10_sessions': self.get_dates_login_record_sessions()
             })
 
         return JsonResponse(data, status=200)
@@ -353,4 +378,3 @@ class PrometheusMetricsApi(HealthApiMixin):
         util = ComponentsPrometheusMetricsUtil()
         metrics_text = util.get_prometheus_metrics_text()
         return HttpResponse(metrics_text, content_type='text/plain; version=0.0.4; charset=utf-8')
-
@@ -16,7 +16,9 @@ class JobViewSet(OrgBulkModelViewSet):
 
     def get_queryset(self):
         query_set = super().get_queryset()
-        return query_set.filter(instant=False)
+        if self.action != 'retrieve':
+            return query_set.filter(instant=False)
+        return query_set
 
     def perform_create(self, serializer):
         instance = serializer.save()
@@ -91,6 +91,9 @@ class Job(JMSOrgBaseModel, PeriodTaskModelMixin):
     def create_execution(self):
         return self.executions.create()
 
+    class Meta:
+        ordering = ['date_created']
+
 
 class JobExecution(JMSOrgBaseModel):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@@ -198,3 +201,6 @@ class JobExecution(JMSOrgBaseModel):
         except Exception as e:
             logging.error(e, exc_info=True)
             self.set_error(e)
+
+    class Meta:
+        ordering = ['-date_created']
@@ -1,11 +1,14 @@
 from django.db.transaction import on_commit
 
 from orgs.models import Organization
 from orgs.tasks import refresh_org_cache_task
 from orgs.utils import current_org, tmp_to_org
 
 from common.cache import Cache, IntegerField
 from common.utils import get_logger
+from common.utils.timezone import local_zero_hour, local_monday
 from users.models import UserGroup, User
+from audits.models import UserLoginLog
+from audits.const import LoginStatusChoices
 from assets.models import Node, Domain, Asset, Account
 from terminal.models import Session
 from perms.models import AssetPermission
@@ -35,30 +38,35 @@ class OrgRelatedCache(Cache):
         """
         Send the signal only after the transaction commits, so that transaction isolation does not leave us reading stale data
         """
 
         def func():
             logger.debug(f'CACHE: Send refresh task {self}.{fields}')
             refresh_org_cache_task.delay(self, *fields)
 
         on_commit(func)
 
     def expire(self, *fields):
         def func():
             super(OrgRelatedCache, self).expire(*fields)
 
         on_commit(func)
 
 
 class OrgResourceStatisticsCache(OrgRelatedCache):
     users_amount = IntegerField()
-    groups_amount = IntegerField(queryset=UserGroup.objects)
-
     assets_amount = IntegerField()
+    new_users_amount_this_week = IntegerField()
+    new_assets_amount_this_week = IntegerField()
     nodes_amount = IntegerField(queryset=Node.objects)
-    accounts_amount = IntegerField(queryset=Account.objects)
     domains_amount = IntegerField(queryset=Domain.objects)
-    # gateways_amount = IntegerField(queryset=Gateway.objects)
+    groups_amount = IntegerField(queryset=UserGroup.objects)
+    accounts_amount = IntegerField(queryset=Account.objects)
     asset_perms_amount = IntegerField(queryset=AssetPermission.objects)
 
     total_count_online_users = IntegerField()
     total_count_online_sessions = IntegerField()
+    total_count_today_login_users = IntegerField()
+    total_count_today_active_assets = IntegerField()
+    total_count_today_failed_sessions = IntegerField()
 
     def __init__(self, org):
         super().__init__()
@@ -70,18 +78,59 @@ class OrgResourceStatisticsCache(OrgRelatedCache):
     def get_current_org(self):
         return self.org
 
+    def get_users(self):
+        return User.get_org_users(self.org)
+
+    @staticmethod
+    def get_assets():
+        return Asset.objects.all()
+
     def compute_users_amount(self):
-        amount = User.get_org_users(self.org).count()
-        return amount
+        users = self.get_users()
+        return users.count()
+
+    def compute_new_users_amount_this_week(self):
+        monday_time = local_monday()
+        users = self.get_users().filter(date_joined__gte=monday_time)
+        return users.count()
 
     def compute_assets_amount(self):
-        if self.org.is_root():
-            return Asset.objects.all().count()
-        node = Node.org_root()
-        return node.assets_amount
+        assets = self.get_assets()
+        return assets.count()
 
-    def compute_total_count_online_users(self):
-        return Session.objects.filter(is_finished=False).values_list('user_id').distinct().count()
+    def compute_new_assets_amount_this_week(self):
+        monday_time = local_monday()
+        assets = self.get_assets().filter(date_created__gte=monday_time)
+        return assets.count()
 
-    def compute_total_count_online_sessions(self):
+    @staticmethod
+    def compute_total_count_online_users():
+        return Session.objects.filter(
+            is_finished=False
+        ).values_list('user_id').distinct().count()
+
+    @staticmethod
+    def compute_total_count_online_sessions():
         return Session.objects.filter(is_finished=False).count()
+
+    def compute_total_count_today_login_users(self):
+        t = local_zero_hour()
+        user_login_logs = UserLoginLog.objects.filter(
+            datetime__gte=t, status=LoginStatusChoices.success
+        )
+        if not self.org.is_root():
+            usernames = self.org.get_members().values('username')
+            user_login_logs = user_login_logs.filter(username__in=usernames)
+        return user_login_logs.values('username').distinct().count()
+
+    @staticmethod
+    def compute_total_count_today_active_assets():
+        t = local_zero_hour()
+        return Session.objects.filter(
+            date_start__gte=t, is_success=False
+        ).values('asset_id').distinct().count()
+
+    @staticmethod
+    def compute_total_count_today_failed_sessions():
+        t = local_zero_hour()
+        return Session.objects.filter(date_start__gte=t, is_success=False).count()
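
The new fields are read the same way IndexApi reads the existing ones, and each compute_* method above runs on a cache miss. A usage sketch, assuming a Django context with an active organization (mirroring how the index API builds its caches object from current_org):

from orgs.utils import current_org
from orgs.caches import OrgResourceStatisticsCache

caches = OrgResourceStatisticsCache(current_org)
print(caches.new_users_amount_this_week)          # backed by compute_new_users_amount_this_week()
print(caches.total_count_today_failed_sessions)   # sessions started today with is_success=False
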
@@ -2,8 +2,9 @@ from django.db.models.signals import post_save, pre_delete, pre_save, post_delete
 from django.dispatch import receiver
 
 from orgs.models import Organization
-from assets.models import Node
+from assets.models import Node, Account
 from perms.models import AssetPermission
+from audits.models import UserLoginLog
 from users.models import UserGroup, User
 from users.signals import pre_user_leave_org
 from terminal.models import Session
@@ -74,12 +75,14 @@ def on_user_delete_refresh_cache(sender, instance, **kwargs):
 
 class OrgResourceStatisticsRefreshUtil:
     model_cache_field_mapper = {
-        AssetPermission: ['asset_perms_amount'],
-        Domain: ['domains_amount'],
         Node: ['nodes_amount'],
-        Asset: ['assets_amount'],
+        Domain: ['domains_amount'],
         UserGroup: ['groups_amount'],
-        RoleBinding: ['users_amount']
+        Account: ['accounts_amount'],
+        RoleBinding: ['users_amount', 'new_users_amount_this_week'],
+        Asset: ['assets_amount', 'new_assets_amount_this_week'],
+        AssetPermission: ['asset_perms_amount'],
     }
 
     @classmethod
@@ -88,7 +91,7 @@ class OrgResourceStatisticsRefreshUtil:
         if not cache_field_name:
             return
         OrgResourceStatisticsCache(Organization.root()).expire(*cache_field_name)
-        if instance.org:
+        if getattr(instance, 'org', None):
             OrgResourceStatisticsCache(instance.org).expire(*cache_field_name)
 
 
@@ -86,8 +86,6 @@ pytz==2022.1
 # Runtime
 django-proxy==1.2.1
 channels-redis==3.4.0
-channels==3.0.4
-daphne==3.0.2
 python-daemon==2.3.0
 eventlet==0.33.1
 greenlet==1.1.2
|
||||||
flower==1.0.0
|
flower==1.0.0
|
||||||
django-celery-beat==2.3.0
|
django-celery-beat==2.3.0
|
||||||
kombu==5.2.4
|
kombu==5.2.4
|
||||||
|
uvicorn==0.20.0
|
||||||
|
websockets==10.4
|
||||||
# Auth
|
# Auth
|
||||||
python-ldap==3.4.0
|
python-ldap==3.4.0
|
||||||
ldap3==2.9.1
|
ldap3==2.9.1
|
||||||
|
|