From 2782d4b5f15304d8422540e95622a0b9be8cc07a Mon Sep 17 00:00:00 2001 From: Bai Date: Thu, 18 Apr 2024 21:18:31 +0800 Subject: [PATCH 01/14] =?UTF-8?q?fix:=20=E4=BF=AE=E5=A4=8D=20Celery=20Exec?= =?UTF-8?q?ution=20=E4=BB=BB=E5=8A=A1=E4=BF=9D=E5=AD=98=E5=A4=B1=E8=B4=A5?= =?UTF-8?q?=E5=AF=BC=E8=87=B4=20View=20=E4=BA=8B=E5=8A=A1=E5=9B=9E?= =?UTF-8?q?=E6=BB=9A=E7=9A=84=E9=97=AE=E9=A2=98=EF=BC=88=E9=A6=96=E6=AC=A1?= =?UTF-8?q?=E7=99=BB=E5=BD=95=E7=94=A8=E6=88=B7=E4=BF=AE=E6=94=B9=E5=AF=86?= =?UTF-8?q?=E7=A0=81=E5=A4=B1=E8=B4=A5=EF=BC=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/ops/signal_handlers.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/apps/ops/signal_handlers.py b/apps/ops/signal_handlers.py index 6fb2ced2d..ad3428100 100644 --- a/apps/ops/signal_handlers.py +++ b/apps/ops/signal_handlers.py @@ -134,9 +134,10 @@ def task_sent_handler(headers=None, body=None, **kwargs): args, kwargs, __ = body try: - args = list(args) + args = json.loads(json.dumps(list(args), cls=JSONEncoder)) kwargs = json.loads(json.dumps(kwargs, cls=JSONEncoder)) except Exception as e: + logger.error('Parse task args or kwargs error (Need handle): {}'.format(e)) args = [] kwargs = {} @@ -151,11 +152,13 @@ def task_sent_handler(headers=None, body=None, **kwargs): request = get_current_request() if request and request.user.is_authenticated: data['creator'] = request.user - try: - CeleryTaskExecution.objects.create(**data) - except Exception as e: - logger.error(e) - CeleryTask.objects.filter(name=task).update(date_last_publish=timezone.now()) + + with transaction.atomic(): + try: + CeleryTaskExecution.objects.create(**data) + except Exception as e: + logger.error('Create celery task execution error: {}'.format(e)) + CeleryTask.objects.filter(name=task).update(date_last_publish=timezone.now()) @receiver(django_ready) From c0273dc698fbecfa62954e25d8248debe4118ddd Mon Sep 17 00:00:00 2001 From: ibuler Date: Thu, 18 Apr 2024 21:31:15 +0800 Subject: [PATCH 02/14] =?UTF-8?q?perf:=20=E5=8E=BB=E6=8E=89=20js=20?= =?UTF-8?q?=E6=8A=A5=E9=94=99?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/jumpserver/settings/logging.py | 2 +- apps/templates/_foot_js.html | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/apps/jumpserver/settings/logging.py b/apps/jumpserver/settings/logging.py index bd44e22dc..c0e873290 100644 --- a/apps/jumpserver/settings/logging.py +++ b/apps/jumpserver/settings/logging.py @@ -17,7 +17,7 @@ LOGGING = { 'disable_existing_loggers': False, 'formatters': { 'verbose': { - 'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s' + 'format': '%(levelname)s %(asctime)s %(pathname)s:%(lineno)d %(message)s' }, 'main': { 'datefmt': '%Y-%m-%d %H:%M:%S', diff --git a/apps/templates/_foot_js.html b/apps/templates/_foot_js.html index 0f6bf03c0..b01be189d 100644 --- a/apps/templates/_foot_js.html +++ b/apps/templates/_foot_js.html @@ -42,7 +42,10 @@ $.fn.select2.defaults.set('language', getUserLang()) const md = window.markdownit(); const markdownContent = document.querySelector('script[type="text/markdown"]').textContent; - document.getElementById('markdown-output').innerHTML = md.render(markdownContent); + const markdownRef = document.getElementById('markdown-output') + if (markdownRef) { + markdownRef.innerHTML = md.render(markdownContent); + } }); From 660572a0eaa171b2bbf8ca04898ab3470309fded Mon Sep 17 00:00:00 2001 From: 
Bai Date: Fri, 19 Apr 2024 04:37:03 +0800 Subject: [PATCH 03/14] =?UTF-8?q?fix:=20merge=5Fdelay=5Frun=20=E5=81=B6?= =?UTF-8?q?=E5=B0=94=E4=BC=9A=E5=87=BA=E7=8E=B0=20(2006,=20MySQL=20server?= =?UTF-8?q?=20has=20gone=20away=20=E7=9A=84=E6=8A=A5=E9=94=99)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/common/db/utils.py | 13 ++++++++++++- apps/common/decorators.py | 5 ++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/apps/common/db/utils.py b/apps/common/db/utils.py index 69c82e583..e61db0bfd 100644 --- a/apps/common/db/utils.py +++ b/apps/common/db/utils.py @@ -1,6 +1,6 @@ from contextlib import contextmanager -from django.db import connections +from django.db import connections, transaction from django.utils.encoding import force_str from common.utils import get_logger, signer, crypto @@ -58,6 +58,17 @@ def safe_db_connection(): close_old_connections() +@contextmanager +def open_db_connection(alias='default'): + connection = transaction.get_connection(alias) + try: + connection.connect() + with transaction.atomic(): + yield connection + finally: + connection.close() + + class Encryptor: def __init__(self, value): self.value = force_str(value) diff --git a/apps/common/decorators.py b/apps/common/decorators.py index 171a4ff33..65a9cad07 100644 --- a/apps/common/decorators.py +++ b/apps/common/decorators.py @@ -12,6 +12,7 @@ from functools import wraps from django.db import transaction from .utils import logger +from .db.utils import open_db_connection def on_transaction_commit(func): @@ -146,7 +147,9 @@ ignore_err_exceptions = ( def _run_func_with_org(key, org, func, *args, **kwargs): from orgs.utils import set_current_org try: - with transaction.atomic(): + with open_db_connection() as conn: + # 保证执行时使用的是新的 connection 数据库连接 + # 避免出现 MySQL server has gone away 的情况 set_current_org(org) func(*args, **kwargs) except Exception as e: From 64125051df5b06d129382d6e4e717b7f5e9870e9 Mon Sep 17 00:00:00 2001 From: Bai Date: Fri, 19 Apr 2024 07:32:42 +0800 Subject: [PATCH 04/14] fix: Org is None not has id attribute --- apps/orgs/signal_handlers/cache.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/apps/orgs/signal_handlers/cache.py b/apps/orgs/signal_handlers/cache.py index aec38527a..73faf1fd3 100644 --- a/apps/orgs/signal_handlers/cache.py +++ b/apps/orgs/signal_handlers/cache.py @@ -87,6 +87,8 @@ class OrgResourceStatisticsRefreshUtil: if not cache_field_name: return org = getattr(instance, 'org', None) + if not org: + return cache_field_name = tuple(cache_field_name) cls.refresh_org_fields.delay(org_fields=((org, cache_field_name),)) From 59f9a4f369de43fb906ce6c93206c53faf7fb4d6 Mon Sep 17 00:00:00 2001 From: fit2bot <68588906+fit2bot@users.noreply.github.com> Date: Fri, 19 Apr 2024 17:41:41 +0800 Subject: [PATCH 05/14] =?UTF-8?q?fix:=20=E8=8E=B7=E5=8F=96=20k8s=20?= =?UTF-8?q?=E6=A0=91=E5=8F=96=E6=B6=88=E5=BC=82=E5=B8=B8=20=E8=BF=94?= =?UTF-8?q?=E5=9B=9E=E7=A9=BA=20=E4=BC=98=E5=8C=96=E9=94=99=E8=AF=AF?= =?UTF-8?q?=E6=97=A5=E5=BF=97=20(#13077)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: feng <1304903146@qq.com> --- apps/assets/utils/k8s.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/assets/utils/k8s.py b/apps/assets/utils/k8s.py index dc3cd8bc6..8e1069566 100644 --- a/apps/assets/utils/k8s.py +++ b/apps/assets/utils/k8s.py @@ -88,8 +88,7 @@ class KubernetesClient: try: data = getattr(self, func_name)(*args) except Exception as e: 
- logger.error(e) - raise e + logger.error(f'K8S tree get {tp} error: {e}') if self.server: self.server.stop() From d4c842521852248d61c5104db481ef228713431e Mon Sep 17 00:00:00 2001 From: wangruidong <940853815@qq.com> Date: Fri, 19 Apr 2024 17:02:50 +0800 Subject: [PATCH 06/14] =?UTF-8?q?fix:=20=E5=BF=AB=E6=8D=B7=E5=91=BD?= =?UTF-8?q?=E4=BB=A4=E8=B4=A6=E5=8F=B7=E9=80=89=E6=8B=A9=E6=9C=AA=E6=8C=89?= =?UTF-8?q?=E8=B4=A6=E5=8F=B7=E6=95=B0=E9=87=8F=E6=8E=92=E5=BA=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/ops/api/job.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apps/ops/api/job.py b/apps/ops/api/job.py index 98fded770..f67c886c4 100644 --- a/apps/ops/api/job.py +++ b/apps/ops/api/job.py @@ -304,6 +304,6 @@ class UsernameHintsAPI(APIView): .filter(username__icontains=query) \ .filter(asset__in=assets) \ .values('username') \ - .annotate(total=Count('username', distinct=True)) \ - .order_by('total', '-username')[:10] + .annotate(total=Count('username')) \ + .order_by('-total', '-username')[:10] return Response(data=top_accounts) From 94286caec4c1a459d2d5fff8f3ad3a627fecb74e Mon Sep 17 00:00:00 2001 From: feng <1304903146@qq.com> Date: Fri, 19 Apr 2024 17:08:03 +0800 Subject: [PATCH 07/14] =?UTF-8?q?fix:=20=E5=91=BD=E4=BB=A4=E8=BE=93?= =?UTF-8?q?=E5=87=BA=E5=8F=96=E6=B6=88=E9=95=BF=E5=BA=A6=E9=99=90=E5=88=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/terminal/serializers/command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/terminal/serializers/command.py b/apps/terminal/serializers/command.py index 0c2e9c949..1c1cb6f11 100644 --- a/apps/terminal/serializers/command.py +++ b/apps/terminal/serializers/command.py @@ -70,7 +70,7 @@ class SessionCommandSerializerMixin(serializers.Serializer): id = serializers.UUIDField(read_only=True) # 限制 64 字符,不能直接迁移成 128 字符,命令表数据量会比较大 account = serializers.CharField(label=_("Account ")) - output = serializers.CharField(max_length=2048, allow_blank=True, label=_("Output")) + output = serializers.CharField(allow_blank=True, label=_("Output")) timestamp = serializers.IntegerField(label=_('Timestamp')) timestamp_display = serializers.DateTimeField(read_only=True, label=_('Datetime')) remote_addr = serializers.CharField(read_only=True, label=_('Remote Address')) From 1ecf8534f6818962a65344b3b104b47afd242174 Mon Sep 17 00:00:00 2001 From: wangruidong <940853815@qq.com> Date: Fri, 19 Apr 2024 17:59:12 +0800 Subject: [PATCH 08/14] =?UTF-8?q?perf:=20=E5=85=BC=E5=AE=B9=E8=87=AA?= =?UTF-8?q?=E5=AE=9A=E4=B9=89=E5=B9=B3=E5=8F=B0=E7=9A=84=E5=8D=8E=E4=B8=BA?= =?UTF-8?q?=E4=BA=A4=E6=8D=A2=E6=9C=BA=E6=89=A7=E8=A1=8C=E5=91=BD=E4=BB=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/ops/ansible/inventory.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/ops/ansible/inventory.py b/apps/ops/ansible/inventory.py index 963406e6a..452c7f2cb 100644 --- a/apps/ops/ansible/inventory.py +++ b/apps/ops/ansible/inventory.py @@ -5,6 +5,7 @@ import re from collections import defaultdict from django.utils.translation import gettext as _ +from assets.const.category import Category __all__ = ['JMSInventory'] @@ -124,7 +125,7 @@ class JMSInventory: else: host.update(self.make_account_ansible_vars(account, path_dir)) - if platform.name == 'Huawei': + if "huawei" in platform.name.lower() and platform.category == Category.DEVICE.value: 
host['ansible_connection'] = 'network_cli' host['ansible_network_os'] = 'asa' From ad0bc82539cd0b3f11a7312f9003a566e77a0ea5 Mon Sep 17 00:00:00 2001 From: Bai Date: Mon, 22 Apr 2024 11:27:48 +0800 Subject: [PATCH 09/14] =?UTF-8?q?perf:=20=E4=BC=98=E5=8C=96=20HUAWEI=20?= =?UTF-8?q?=E8=AE=BE=E5=A4=87=E5=88=A4=E6=96=AD=E9=80=BB=E8=BE=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/assets/models/platform.py | 12 ++++++++++-- apps/ops/ansible/inventory.py | 2 +- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/apps/assets/models/platform.py b/apps/assets/models/platform.py index a93a96863..85ec8d535 100644 --- a/apps/assets/models/platform.py +++ b/apps/assets/models/platform.py @@ -1,8 +1,7 @@ from django.db import models from django.utils.translation import gettext_lazy as _ -from assets.const import AllTypes -from assets.const import Protocol +from assets.const import AllTypes, Category, Protocol from common.db.fields import JsonDictTextField from common.db.models import JMSBaseModel @@ -119,6 +118,15 @@ class Platform(LabeledMixin, JMSBaseModel): ) return linux.id + def is_huawei(self): + if self.category != Category.DEVICE: + return False + if 'huawei' in self.name.lower(): + return True + if '华为' in self.name: + return True + return False + def __str__(self): return self.name diff --git a/apps/ops/ansible/inventory.py b/apps/ops/ansible/inventory.py index 452c7f2cb..f969861cb 100644 --- a/apps/ops/ansible/inventory.py +++ b/apps/ops/ansible/inventory.py @@ -125,7 +125,7 @@ class JMSInventory: else: host.update(self.make_account_ansible_vars(account, path_dir)) - if "huawei" in platform.name.lower() and platform.category == Category.DEVICE.value: + if platform.is_huawei(): host['ansible_connection'] = 'network_cli' host['ansible_network_os'] = 'asa' From ef7329a721e7e36c09c656596f33eccff768bdde Mon Sep 17 00:00:00 2001 From: jiangweidong <1053570670@qq.com> Date: Fri, 19 Apr 2024 17:11:25 +0800 Subject: [PATCH 10/14] =?UTF-8?q?perf:=20=E4=BC=98=E5=8C=96=E9=A2=91?= =?UTF-8?q?=E7=B9=81=E5=8F=91=E9=80=81=E7=9F=AD=E4=BF=A1=EF=BC=8C=E5=B0=86?= =?UTF-8?q?=E5=90=8E=E7=AB=AF=E7=9A=84=E9=A2=91=E7=B9=81=E5=8F=91=E9=80=81?= =?UTF-8?q?=E8=AD=A6=E5=91=8A=E6=8F=90=E7=A4=BA=E5=88=B0=E9=A1=B5=E9=9D=A2?= =?UTF-8?q?=E4=B8=8A=E6=9D=A5=E6=8F=90=E9=86=92=E7=94=A8=E6=88=B7?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/authentication/api/mfa.py | 4 +++- apps/common/utils/verify_code.py | 10 +++++----- apps/templates/_mfa_login_field.html | 7 +++++++ 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/apps/authentication/api/mfa.py b/apps/authentication/api/mfa.py index 2d3a9b072..ed88b116e 100644 --- a/apps/authentication/api/mfa.py +++ b/apps/authentication/api/mfa.py @@ -9,7 +9,7 @@ from rest_framework.permissions import AllowAny from rest_framework.response import Response from rest_framework.serializers import ValidationError -from common.exceptions import UnexpectError +from common.exceptions import JMSException, UnexpectError from common.utils import get_logger from users.models.user import User from .. 
import errors @@ -61,6 +61,8 @@ class MFASendCodeApi(AuthMixin, CreateAPIView): try: mfa_backend.send_challenge() + except JMSException: + raise except Exception as e: raise UnexpectError(str(e)) diff --git a/apps/common/utils/verify_code.py b/apps/common/utils/verify_code.py index 7b2589f7d..115752f82 100644 --- a/apps/common/utils/verify_code.py +++ b/apps/common/utils/verify_code.py @@ -30,14 +30,14 @@ class SendAndVerifyCodeUtil(object): self.other_args = kwargs def gen_and_send_async(self): + ttl = self.__ttl() + if ttl > 0: + logger.warning('Send sms too frequently, delay {}'.format(ttl)) + raise CodeSendTooFrequently(ttl) + return send_async.apply_async(kwargs={"sender": self}, priority=100) def gen_and_send(self): - ttl = self.__ttl() - if ttl > 0: - logger.error('Send sms too frequently, delay {}'.format(ttl)) - raise CodeSendTooFrequently(ttl) - try: if not self.code: self.code = self.__generate() diff --git a/apps/templates/_mfa_login_field.html b/apps/templates/_mfa_login_field.html index 412b369ca..ec2477d1f 100644 --- a/apps/templates/_mfa_login_field.html +++ b/apps/templates/_mfa_login_field.html @@ -118,11 +118,18 @@ }) } + function onError (responseText, responseJson, status) { + setTimeout(function () { + toastr.error(responseJson.detail); + }); + }; + requestApi({ url: url, method: "POST", body: JSON.stringify(data), success: onSuccess, + error: onError, flash_message: false }) } From 52922088a92f97d2bc3f117f81ab83be1eb31e98 Mon Sep 17 00:00:00 2001 From: fit2bot <68588906+fit2bot@users.noreply.github.com> Date: Mon, 22 Apr 2024 13:51:52 +0800 Subject: [PATCH 11/14] =?UTF-8?q?feat:=20=E4=BC=98=E5=8C=96=E4=BB=A3?= =?UTF-8?q?=E7=A0=81=E7=BB=93=E6=9E=84=EF=BC=8Creceptor=E5=BC=80=E5=85=B3?= =?UTF-8?q?=EF=BC=8C=E4=BF=AE=E6=94=B9=E4=B8=BA=20tcp=20=E9=80=9A=E4=BF=A1?= =?UTF-8?q?=20(#13078)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: 优化代码结构,receptor开关,修改为 tcp 通信 * fix: 修改导包路径 * fix: 修复错别字 * fix: 修改导包路径 * perf: 优化代码 * fix: 修复任务不执行的问题 * perf: 优化配置项名称 * perf: 优化代码结构 * perf: 优化代码 --------- Co-authored-by: Aaron3S --- apps/assets/automations/base/manager.py | 7 +- apps/jumpserver/conf.py | 6 +- apps/jumpserver/settings/custom.py | 5 +- .../receptor => libs/process}/__init__.py | 0 apps/libs/process/ssh.py | 26 ++++ apps/ops/ansible/__init__.py | 4 +- apps/ops/ansible/callback.py | 2 +- apps/ops/ansible/cleaner.py | 26 ++-- apps/ops/ansible/exception.py | 5 + apps/ops/ansible/interface.py | 46 ++++++ apps/ops/ansible/receptor/receptor_runner.py | 147 ------------------ apps/ops/ansible/runner.py | 45 ++---- apps/ops/ansible/runners/__init__.py | 3 + apps/ops/ansible/runners/base.py | 42 +++++ apps/ops/ansible/runners/native.py | 24 +++ apps/ops/ansible/runners/receptor.py | 100 ++++++++++++ .../ansible/runners/receptorctl/__init__.py | 0 .../runners/receptorctl/receptorctl.py | 38 +++++ apps/ops/models/job.py | 6 +- apps/ops/signal_handlers.py | 5 +- receptor | 30 +--- 21 files changed, 337 insertions(+), 230 deletions(-) rename apps/{ops/ansible/receptor => libs/process}/__init__.py (100%) create mode 100644 apps/libs/process/ssh.py create mode 100644 apps/ops/ansible/exception.py create mode 100644 apps/ops/ansible/interface.py delete mode 100644 apps/ops/ansible/receptor/receptor_runner.py create mode 100644 apps/ops/ansible/runners/__init__.py create mode 100644 apps/ops/ansible/runners/base.py create mode 100644 apps/ops/ansible/runners/native.py create mode 100644 apps/ops/ansible/runners/receptor.py create mode 100644 
apps/ops/ansible/runners/receptorctl/__init__.py create mode 100644 apps/ops/ansible/runners/receptorctl/receptorctl.py diff --git a/apps/assets/automations/base/manager.py b/apps/assets/automations/base/manager.py index d94e5583c..570f6f195 100644 --- a/apps/assets/automations/base/manager.py +++ b/apps/assets/automations/base/manager.py @@ -12,7 +12,8 @@ from sshtunnel import SSHTunnelForwarder from assets.automations.methods import platform_automation_methods from common.utils import get_logger, lazyproperty, is_openssh_format_key, ssh_pubkey_gen -from ops.ansible import JMSInventory, SuperPlaybookRunner, DefaultCallback +from ops.ansible import JMSInventory, DefaultCallback, SuperPlaybookRunner +from ops.ansible.interface import interface logger = get_logger(__name__) @@ -54,7 +55,9 @@ class SSHTunnelManager: not_valid.append(k) else: local_bind_port = server.local_bind_port - host['ansible_host'] = jms_asset['address'] = host['login_host'] = 'jms_celery' + + host['ansible_host'] = jms_asset['address'] = host[ + 'login_host'] = interface.get_gateway_proxy_host() host['ansible_port'] = jms_asset['port'] = host['login_port'] = local_bind_port servers.append(server) diff --git a/apps/jumpserver/conf.py b/apps/jumpserver/conf.py index 7534683b2..1ee650b39 100644 --- a/apps/jumpserver/conf.py +++ b/apps/jumpserver/conf.py @@ -617,8 +617,10 @@ class Config(dict): 'TICKET_APPLY_ASSET_SCOPE': 'all', # Ansible Receptor - 'ANSIBLE_RECEPTOR_ENABLE': True, - 'ANSIBLE_RECEPTOR_SOCK_PATH': '{}/data/share/control.sock'.format(PROJECT_DIR) + 'ANSIBLE_RECEPTOR_ENABLED': True, + 'ANSIBLE_RECEPTOR_GATEWAY_PROXY_HOST': 'jms_celery', + 'ANSIBLE_RECEPTOR_TCP_LISTEN_ADDRESS': 'jms_receptor:7521' + } old_config_map = { diff --git a/apps/jumpserver/settings/custom.py b/apps/jumpserver/settings/custom.py index 453648240..5a4418270 100644 --- a/apps/jumpserver/settings/custom.py +++ b/apps/jumpserver/settings/custom.py @@ -232,5 +232,6 @@ FILE_UPLOAD_SIZE_LIMIT_MB = CONFIG.FILE_UPLOAD_SIZE_LIMIT_MB TICKET_APPLY_ASSET_SCOPE = CONFIG.TICKET_APPLY_ASSET_SCOPE # Ansible Receptor -ANSIBLE_RECEPTOR_ENABLE = CONFIG.ANSIBLE_RECEPTOR_ENABLE -ANSIBLE_RECEPTOR_SOCK_PATH = CONFIG.ANSIBLE_RECEPTOR_SOCK_PATH +ANSIBLE_RECEPTOR_ENABLED = CONFIG.ANSIBLE_RECEPTOR_ENABLED +ANSIBLE_RECEPTOR_GATEWAY_PROXY_HOST = CONFIG.ANSIBLE_RECEPTOR_GATEWAY_PROXY_HOST +ANSIBLE_RECEPTOR_TCP_LISTEN_ADDRESS = CONFIG.ANSIBLE_RECEPTOR_TCP_LISTEN_ADDRESS diff --git a/apps/ops/ansible/receptor/__init__.py b/apps/libs/process/__init__.py similarity index 100% rename from apps/ops/ansible/receptor/__init__.py rename to apps/libs/process/__init__.py diff --git a/apps/libs/process/ssh.py b/apps/libs/process/ssh.py new file mode 100644 index 000000000..baf0f776f --- /dev/null +++ b/apps/libs/process/ssh.py @@ -0,0 +1,26 @@ +import logging + +import psutil +from psutil import NoSuchProcess + +logger = logging.getLogger(__name__) + + +def _should_kill(process): + return process.pid != 1 and process.name() == 'ssh' + + +def kill_ansible_ssh_process(pid): + try: + process = psutil.Process(pid) + except NoSuchProcess as e: + logger.error(f"No such process: {e}") + return + + for child in process.children(recursive=True): + if not _should_kill(child): + return + try: + child.kill() + except Exception as e: + logger.error(f"Failed to kill process {child.pid}: {e}") diff --git a/apps/ops/ansible/__init__.py b/apps/ops/ansible/__init__.py index a175387eb..df052a0a0 100644 --- a/apps/ops/ansible/__init__.py +++ b/apps/ops/ansible/__init__.py @@ -2,5 +2,7 @@ from 
.callback import * from .inventory import * -from .runner import * +from .runners import * from .exceptions import * +from .runner import * +from .interface import * \ No newline at end of file diff --git a/apps/ops/ansible/callback.py b/apps/ops/ansible/callback.py index 80094332e..c11941be4 100644 --- a/apps/ops/ansible/callback.py +++ b/apps/ops/ansible/callback.py @@ -165,4 +165,4 @@ class DefaultCallback: def write_pid(self, pid): pid_filepath = os.path.join(self.private_data_dir, 'local.pid') with open(pid_filepath, 'w') as f: - f.write(str(pid)) \ No newline at end of file + f.write(str(pid)) diff --git a/apps/ops/ansible/cleaner.py b/apps/ops/ansible/cleaner.py index 5a2f29d2f..309ea5df4 100644 --- a/apps/ops/ansible/cleaner.py +++ b/apps/ops/ansible/cleaner.py @@ -4,6 +4,20 @@ from functools import wraps from settings.api import settings +__all__ = ["WorkPostRunCleaner", "cleanup_post_run"] + + +class WorkPostRunCleaner: + @property + def clean_dir(self): + raise NotImplemented + + def clean_post_run(self): + if settings.DEBUG_DEV: + return + if self.clean_dir and os.path.exists(self.clean_dir): + shutil.rmtree(self.clean_dir) + def cleanup_post_run(func): def get_instance(*args): @@ -22,15 +36,3 @@ def cleanup_post_run(func): instance.clean_post_run() return wrapper - - -class WorkPostRunCleaner: - @property - def clean_dir(self): - raise NotImplemented - - def clean_post_run(self): - if settings.DEBUG_DEV: - return - if self.clean_dir and os.path.exists(self.clean_dir): - shutil.rmtree(self.clean_dir) diff --git a/apps/ops/ansible/exception.py b/apps/ops/ansible/exception.py new file mode 100644 index 000000000..dc5926f05 --- /dev/null +++ b/apps/ops/ansible/exception.py @@ -0,0 +1,5 @@ +__all__ = ['CommandInBlackListException'] + + +class CommandInBlackListException(Exception): + pass diff --git a/apps/ops/ansible/interface.py b/apps/ops/ansible/interface.py new file mode 100644 index 000000000..065e80abc --- /dev/null +++ b/apps/ops/ansible/interface.py @@ -0,0 +1,46 @@ +from django.conf import settings +from django.utils.functional import LazyObject + +from ops.ansible import AnsibleReceptorRunner, AnsibleNativeRunner +from ops.ansible.runners.base import BaseRunner + +__all__ = ['interface'] + + +class _LazyRunnerInterface(LazyObject): + + def _setup(self): + self._wrapped = self.make_interface() + + @staticmethod + def make_interface(): + runner_type = AnsibleReceptorRunner \ + if settings.ANSIBLE_RECEPTOR_ENABLED else AnsibleNativeRunner + gateway_host = settings.ANSIBLE_RECEPTOR_GATEWAY_PROXY_HOST \ + if settings.ANSIBLE_RECEPTOR_GATEWAY_PROXY_HOST else '127.0.0.1' + return RunnerInterface(runner_type=runner_type, gateway_proxy_host=gateway_host) + + +interface = _LazyRunnerInterface() + + +class RunnerInterface: + def __init__(self, runner_type, gateway_proxy_host='127.0.0.1'): + if not issubclass(runner_type, BaseRunner): + raise TypeError(f'{runner_type} can not cast to {BaseRunner}') + self._runner_type = runner_type + self._gateway_proxy_host = gateway_proxy_host + + def get_gateway_proxy_host(self): + return self._gateway_proxy_host + + def get_runner_type(self): + return self._runner_type + + def kill_process(self, pid): + return self._runner_type.kill_precess(pid) + + def run(self, **kwargs): + runner_type = self.get_runner_type() + runner = runner_type(**kwargs) + return runner.run() diff --git a/apps/ops/ansible/receptor/receptor_runner.py b/apps/ops/ansible/receptor/receptor_runner.py deleted file mode 100644 index de07859ae..000000000 --- 
a/apps/ops/ansible/receptor/receptor_runner.py +++ /dev/null @@ -1,147 +0,0 @@ -import concurrent.futures -import os -import queue -import socket - -from django.conf import settings -import ansible_runner -from receptorctl import ReceptorControl - -from ops.ansible.cleaner import WorkPostRunCleaner, cleanup_post_run - - -class ReceptorCtl: - @property - def ctl(self): - return ReceptorControl(settings.ANSIBLE_RECEPTOR_SOCK_PATH) - - def cancel(self, unit_id): - return self.ctl.simple_command("work cancel {}".format(unit_id)) - - def nodes(self): - return self.ctl.simple_command("status").get("Advertisements", None) - - def submit_work(self, - worktype, - payload, - node=None, - tlsclient=None, - ttl=None, - signwork=False, - params=None, ): - return self.ctl.submit_work(worktype, payload, node, tlsclient, ttl, signwork, params) - - def get_work_results(self, unit_id, startpos=0, return_socket=False, return_sockfile=True): - return self.ctl.get_work_results(unit_id, startpos, return_socket, return_sockfile) - - def kill_process(self, pid): - submit_result = self.submit_work(worktype="kill", node="primary", payload=str(pid)) - unit_id = submit_result["unitid"] - result_socket, result_file = self.get_work_results(unit_id=unit_id, return_sockfile=True, - return_socket=True) - while not result_socket.close(): - buf = result_file.read() - if not buf: - break - print(buf.decode('utf8')) - - -receptor_ctl = ReceptorCtl() - - -def run(**kwargs): - receptor_runner = AnsibleReceptorRunner(**kwargs) - return receptor_runner.run() - - -class AnsibleReceptorRunner(WorkPostRunCleaner): - def __init__(self, **kwargs): - self.runner_params = kwargs - self.unit_id = None - self.clean_workspace = kwargs.pop("clean_workspace", True) - - def write_unit_id(self): - if not self.unit_id: - return - private_dir = self.runner_params.get("private_data_dir", "") - with open(os.path.join(private_dir, "local.unitid"), "w") as f: - f.write(self.unit_id) - f.flush() - - @property - def clean_dir(self): - if not self.clean_workspace: - return None - return self.runner_params.get("private_data_dir", None) - - @cleanup_post_run - def run(self): - input, output = socket.socketpair() - - with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: - transmitter_future = executor.submit(self.transmit, input) - result = receptor_ctl.submit_work(payload=output.makefile('rb'), - node='primary', worktype='ansible-runner') - input.close() - output.close() - - self.unit_id = result['unitid'] - self.write_unit_id() - - transmitter_future.result() - - result_file = receptor_ctl.get_work_results(self.unit_id, return_sockfile=True) - - stdout_queue = queue.Queue() - - with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: - processor_future = executor.submit(self.processor, result_file, stdout_queue) - - while not processor_future.done() or \ - not stdout_queue.empty(): - msg = stdout_queue.get() - if msg is None: - break - print(msg) - - return processor_future.result() - - def transmit(self, _socket): - try: - ansible_runner.run( - streamer='transmit', - _output=_socket.makefile('wb'), - **self.runner_params - ) - finally: - _socket.shutdown(socket.SHUT_WR) - - def processor(self, _result_file, stdout_queue): - try: - original_event_handler = self.runner_params.pop("event_handler", None) - original_status_handler = self.runner_params.pop("status_handler", None) - - def event_handler(data, **kwargs): - stdout = data.get('stdout', '') - if stdout: - stdout_queue.put(stdout) - if original_event_handler: - 
original_event_handler(data, **kwargs) - - def status_handler(data, **kwargs): - private_data_dir = self.runner_params.get("private_data_dir", None) - if private_data_dir: - data["private_data_dir"] = private_data_dir - if original_status_handler: - original_status_handler(data, **kwargs) - - return ansible_runner.interface.run( - quite=True, - streamer='process', - _input=_result_file, - event_handler=event_handler, - status_handler=status_handler, - **self.runner_params, - ) - finally: - stdout_queue.put(None) diff --git a/apps/ops/ansible/runner.py b/apps/ops/ansible/runner.py index 1d808f196..6af2fffb4 100644 --- a/apps/ops/ansible/runner.py +++ b/apps/ops/ansible/runner.py @@ -1,43 +1,24 @@ -import logging import os import shutil import uuid -import ansible_runner from django.conf import settings from django.utils._os import safe_join -from django.utils.functional import LazyObject - +from .interface import interface from .callback import DefaultCallback -from .receptor import receptor_runner +from .exception import CommandInBlackListException from ..utils import get_ansible_log_verbosity -logger = logging.getLogger(__file__) - - -class CommandInBlackListException(Exception): - pass - - -class AnsibleWrappedRunner(LazyObject): - def _setup(self): - self._wrapped = self.get_runner() - - @staticmethod - def get_runner(): - if settings.ANSIBLE_RECEPTOR_ENABLE and settings.ANSIBLE_RECEPTOR_SOCK_PATH: - return receptor_runner - return ansible_runner - - -runner = AnsibleWrappedRunner() +__all__ = ['AdHocRunner', 'PlaybookRunner', 'SuperPlaybookRunner', 'UploadFileRunner'] class AdHocRunner: cmd_modules_choices = ('shell', 'raw', 'command', 'script', 'win_shell') - def __init__(self, inventory, module, module_args='', pattern='*', project_dir='/tmp/', extra_vars={}, + def __init__(self, inventory, module, module_args='', pattern='*', project_dir='/tmp/', extra_vars=None, dry_run=False, timeout=-1): + if extra_vars is None: + extra_vars = {} self.id = uuid.uuid4() self.inventory = inventory self.pattern = pattern @@ -69,7 +50,7 @@ class AdHocRunner: if os.path.exists(private_env): shutil.rmtree(private_env) - runner.run( + interface.run( timeout=self.timeout if self.timeout > 0 else None, extravars=self.extra_vars, host_pattern=self.pattern, @@ -112,7 +93,7 @@ class PlaybookRunner: if os.path.exists(private_env): shutil.rmtree(private_env) - runner.run( + interface.run( private_data_dir=self.project_dir, inventory=self.inventory, playbook=self.playbook, @@ -144,7 +125,7 @@ class UploadFileRunner: def run(self, verbosity=0, **kwargs): verbosity = get_ansible_log_verbosity(verbosity) - runner.run( + interface.run( private_data_dir=self.project_dir, host_pattern="*", inventory=self.inventory, @@ -160,11 +141,3 @@ class UploadFileRunner: except OSError as e: print(f"del upload tmp dir {self.src_paths} failed! 
{e}") return self.cb - - -class CommandRunner(AdHocRunner): - def __init__(self, inventory, command, pattern='*', project_dir='/tmp/'): - super().__init__(inventory, 'shell', command, pattern, project_dir) - - def run(self, verbosity=0, **kwargs): - return super().run(verbosity, **kwargs) diff --git a/apps/ops/ansible/runners/__init__.py b/apps/ops/ansible/runners/__init__.py new file mode 100644 index 000000000..155e1b8e1 --- /dev/null +++ b/apps/ops/ansible/runners/__init__.py @@ -0,0 +1,3 @@ +from .base import * +from .native import * +from .receptor import * diff --git a/apps/ops/ansible/runners/base.py b/apps/ops/ansible/runners/base.py new file mode 100644 index 000000000..2cadbb368 --- /dev/null +++ b/apps/ops/ansible/runners/base.py @@ -0,0 +1,42 @@ +from ops.ansible.cleaner import WorkPostRunCleaner, cleanup_post_run + + +class BaseRunner(WorkPostRunCleaner): + + def __init__(self, **kwargs): + self.runner_params = kwargs + self.clean_workspace = kwargs.pop("clean_workspace", True) + + @classmethod + def kill_precess(cls, pid): + return NotImplementedError + + @property + def clean_dir(self): + if not self.clean_workspace: + return None + return self.private_data_dir + + @property + def private_data_dir(self): + return self.runner_params.get('private_data_dir', None) + + def get_event_handler(self): + _event_handler = self.runner_params.pop("event_handler", None) + return _event_handler + + def get_status_handler(self): + _status_handler = self.runner_params.pop("status_handler", None) + + if not _status_handler: + return + + def _handler(data, **kwargs): + if self.private_data_dir: + data["private_data_dir"] = self.private_data_dir + _status_handler(data, **kwargs) + + return _handler + + def run(self): + raise NotImplementedError() diff --git a/apps/ops/ansible/runners/native.py b/apps/ops/ansible/runners/native.py new file mode 100644 index 000000000..00f541ae8 --- /dev/null +++ b/apps/ops/ansible/runners/native.py @@ -0,0 +1,24 @@ +import ansible_runner + +from libs.process.ssh import kill_ansible_ssh_process +from ops.ansible.cleaner import cleanup_post_run +from ops.ansible.runners.base import BaseRunner + +__all__ = ['AnsibleNativeRunner'] + + +class AnsibleNativeRunner(BaseRunner): + def __init__(self, **kwargs): + super().__init__(**kwargs) + + @classmethod + def kill_precess(cls, pid): + return kill_ansible_ssh_process(pid) + + @cleanup_post_run + def run(self): + ansible_runner.run( + event_handler=self.get_event_handler(), + status_handler=self.get_status_handler(), + **self.runner_params, + ) diff --git a/apps/ops/ansible/runners/receptor.py b/apps/ops/ansible/runners/receptor.py new file mode 100644 index 000000000..1cc1de12c --- /dev/null +++ b/apps/ops/ansible/runners/receptor.py @@ -0,0 +1,100 @@ +import concurrent.futures +import os +import queue +import socket + +import ansible_runner + +from ops.ansible.cleaner import cleanup_post_run +from ops.ansible.runners.receptorctl.receptorctl import ReceptorCtl +from ops.ansible.runners.base import BaseRunner + +__all__ = ['AnsibleReceptorRunner'] + +receptor_ctl = ReceptorCtl() + + +class AnsibleReceptorRunner(BaseRunner): + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.unit_id = None + self.stdout_queue = None + + @classmethod + def kill_precess(cls, pid): + return receptor_ctl.kill_process(pid) + + def write_unit_id(self): + if not self.unit_id: + return + private_dir = self.runner_params.get("private_data_dir", "") + with open(os.path.join(private_dir, "local.unitid"), "w") as f: + 
f.write(self.unit_id) + f.flush() + + @cleanup_post_run + def run(self): + input, output = socket.socketpair() + + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + transmitter_future = executor.submit(self.transmit, input) + result = receptor_ctl.submit_work(payload=output.makefile('rb'), + node='primary', worktype='ansible-runner') + + input.close() + output.close() + + self.unit_id = result['unitid'] + self.write_unit_id() + + transmitter_future.result() + + result_file = receptor_ctl.get_work_results(self.unit_id, return_sockfile=True) + + self.stdout_queue = queue.Queue() + + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + processor_future = executor.submit(self.processor, result_file) + + while not processor_future.done() or \ + not self.stdout_queue.empty(): + msg = self.stdout_queue.get() + if msg is None: + break + print(msg) + + return processor_future.result() + + def transmit(self, _socket): + try: + ansible_runner.run( + streamer='transmit', + _output=_socket.makefile('wb'), + **self.runner_params + ) + finally: + _socket.shutdown(socket.SHUT_WR) + + def get_event_handler(self): + _event_handler = super().get_event_handler() + + def _handler(data, **kwargs): + stdout = data.get('stdout', '') + if stdout: + self.stdout_queue.put(stdout) + _event_handler(data, **kwargs) + + return _handler + + def processor(self, _result_file): + try: + return ansible_runner.interface.run( + quite=True, + streamer='process', + _input=_result_file, + event_handler=self.get_event_handler(), + status_handler=self.get_status_handler(), + **self.runner_params, + ) + finally: + self.stdout_queue.put(None) diff --git a/apps/ops/ansible/runners/receptorctl/__init__.py b/apps/ops/ansible/runners/receptorctl/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/apps/ops/ansible/runners/receptorctl/receptorctl.py b/apps/ops/ansible/runners/receptorctl/receptorctl.py new file mode 100644 index 000000000..0c3d44c10 --- /dev/null +++ b/apps/ops/ansible/runners/receptorctl/receptorctl.py @@ -0,0 +1,38 @@ +from django.conf import settings +from receptorctl import ReceptorControl + + +class ReceptorCtl: + @property + def ctl(self): + return ReceptorControl("tcp://{}".format(settings.ANSIBLE_RECEPTOR_TCP_LISTEN_ADDRESS)) + + def cancel(self, unit_id): + return self.ctl.simple_command("work cancel {}".format(unit_id)) + + def nodes(self): + return self.ctl.simple_command("status").get("Advertisements", None) + + def submit_work(self, + worktype, + payload, + node=None, + tlsclient=None, + ttl=None, + signwork=False, + params=None, ): + return self.ctl.submit_work(worktype, payload, node, tlsclient, ttl, signwork, params) + + def get_work_results(self, unit_id, startpos=0, return_socket=False, return_sockfile=True): + return self.ctl.get_work_results(unit_id, startpos, return_socket, return_sockfile) + + def kill_process(self, pid): + submit_result = self.submit_work(worktype="kill", node="primary", payload=str(pid)) + unit_id = submit_result["unitid"] + result_socket, result_file = self.get_work_results(unit_id=unit_id, return_sockfile=True, + return_socket=True) + while not result_socket.close(): + buf = result_file.read() + if not buf: + break + print(buf.decode('utf8')) diff --git a/apps/ops/models/job.py b/apps/ops/models/job.py index e7245568b..f1fd8adb9 100644 --- a/apps/ops/models/job.py +++ b/apps/ops/models/job.py @@ -22,8 +22,10 @@ from acls.models import CommandFilterACL from assets.models import Asset from 
assets.automations.base.manager import SSHTunnelManager from common.db.encoder import ModelJSONFieldEncoder -from ops.ansible import JMSInventory, AdHocRunner, PlaybookRunner, CommandInBlackListException, UploadFileRunner -from ops.ansible.receptor import receptor_runner +from ops.ansible import JMSInventory, AdHocRunner, PlaybookRunner, UploadFileRunner + +"""stop all ssh child processes of the given ansible process pid.""" +from ops.ansible.exception import CommandInBlackListException from ops.mixin import PeriodTaskModelMixin from ops.variables import * from ops.const import Types, RunasPolicies, JobStatus, JobModules diff --git a/apps/ops/signal_handlers.py b/apps/ops/signal_handlers.py index ad3428100..c070fd094 100644 --- a/apps/ops/signal_handlers.py +++ b/apps/ops/signal_handlers.py @@ -1,4 +1,3 @@ -import ast import json import time @@ -17,9 +16,9 @@ from common.signals import django_ready from common.utils.connection import RedisPubSub from jumpserver.utils import get_current_request from orgs.utils import get_current_org_id, set_current_org -from .ansible.receptor.receptor_runner import receptor_ctl from .celery import app from .models import CeleryTaskExecution, CeleryTask, Job +from .ansible.runner import interface logger = get_logger(__name__) @@ -167,7 +166,7 @@ def subscribe_stop_job_execution(sender, **kwargs): def on_stop(pid): logger.info(f"Stop job execution {pid} start") - receptor_ctl.kill_process(pid) + interface.kill_process(pid) job_execution_stop_pub_sub.subscribe(on_stop) diff --git a/receptor b/receptor index f9a17b711..d32f40889 100755 --- a/receptor +++ b/receptor @@ -9,8 +9,7 @@ import os import signal import tempfile -import psutil -from psutil import NoSuchProcess +from apps.libs.process.ssh import kill_ansible_ssh_process ANSIBLE_RUNNER_COMMAND = "ansible-runner" @@ -22,6 +21,8 @@ DEFAULT_SHARE_DIR = os.path.join(PROJECT_DIR, "data", "share") DEFAULT_ANSIBLE_MODULES_DIR = os.path.join(APPS_DIR, "libs", "ansible", "modules") DEFAULT_CONTROL_SOCK_PATH = os.path.join(DEFAULT_SHARE_DIR, "control.sock") +DEFAULT_TCP_LISTEN_ADDRESS = "0.0.0.0:7521" + logger = logging.getLogger(__name__) os.chdir(APPS_DIR) @@ -34,9 +35,10 @@ class ReceptorService: 'receptor', '--local-only', '--node', 'id=primary', + '--log-level', 'level=Error', '--control-service', 'service=control', - 'filename={}'.format(DEFAULT_CONTROL_SOCK_PATH), + 'tcplisten={}'.format(DEFAULT_TCP_LISTEN_ADDRESS), '--work-command', 'worktype={}'.format(ANSIBLE_RUNNER_COMMAND), 'command={}'.format(ANSIBLE_RUNNER_COMMAND), @@ -49,6 +51,7 @@ class ReceptorService: 'allowruntimeparams=true' ] + @staticmethod def before_start(): os.makedirs(os.path.join(DEFAULT_SHARE_DIR), exist_ok=True) @@ -141,29 +144,12 @@ def kill_progress_tree(pid=None): try: pid_input = input() pid = int(pid_input) + logger.info("progress {} will be kill".format(pid)) + kill_ansible_ssh_process(pid) except Exception as e: logger.error(e) return - logger.info("progress {} will be kill".format(pid)) - - try: - current_process = psutil.Process(pid) - except NoSuchProcess as e: - logger.error(e) - return - - children = current_process.children(recursive=True) - for child in children: - if child.pid == 1: - continue - if child.name() != 'ssh': - continue - try: - child.kill() - except Exception as e: - logger.error(e) - if __name__ == '__main__': parser = argparse.ArgumentParser( From 2cc67634a40ebdbf57b48d238335b76988f6f183 Mon Sep 17 00:00:00 2001 From: Eric Date: Mon, 22 Apr 2024 16:29:29 +0800 Subject: [PATCH 12/14] 
=?UTF-8?q?perf:=20=E5=8F=91=E5=B8=83=E6=9C=BA?= =?UTF-8?q?=E6=94=AF=E6=8C=81=E5=B9=B3=E5=8F=B0=E8=BF=9E=E6=8E=A5=E5=8F=82?= =?UTF-8?q?=E6=95=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/authentication/models/connection_token.py | 2 ++ apps/authentication/serializers/connect_token_secret.py | 1 + 2 files changed, 3 insertions(+) diff --git a/apps/authentication/models/connection_token.py b/apps/authentication/models/connection_token.py index f07874a2a..07fd6483b 100644 --- a/apps/authentication/models/connection_token.py +++ b/apps/authentication/models/connection_token.py @@ -204,12 +204,14 @@ class ConnectionToken(JMSOrgBaseModel): host, account, lock_key = bulk_get(host_account, ('host', 'account', 'lock_key')) gateway = host.domain.select_gateway() if host.domain else None + platform = host.platform data = { 'id': lock_key, 'applet': applet, 'host': host, 'gateway': gateway, + 'platform': platform, 'account': account, 'remote_app_option': self.get_remote_app_option() } diff --git a/apps/authentication/serializers/connect_token_secret.py b/apps/authentication/serializers/connect_token_secret.py index 3eea79e4e..500daa410 100644 --- a/apps/authentication/serializers/connect_token_secret.py +++ b/apps/authentication/serializers/connect_token_secret.py @@ -161,6 +161,7 @@ class ConnectTokenAppletOptionSerializer(serializers.Serializer): host = _ConnectionTokenAssetSerializer(read_only=True) account = _ConnectionTokenAccountSerializer(read_only=True) gateway = _ConnectionTokenGatewaySerializer(read_only=True) + platform = _ConnectionTokenPlatformSerializer(read_only=True) remote_app_option = serializers.JSONField(read_only=True) From 6b5d4a481058754cb757b9a8fc3ca74dd723faf9 Mon Sep 17 00:00:00 2001 From: Bai Date: Mon, 22 Apr 2024 19:31:17 +0800 Subject: [PATCH 13/14] =?UTF-8?q?fix:=20=E4=BF=AE=E5=A4=8D=E4=BB=AA?= =?UTF-8?q?=E8=A1=A8=E7=9B=98=E4=BC=9A=E8=AF=9D=E6=8E=92=E5=BA=8F=E6=95=B0?= =?UTF-8?q?=E9=87=8F=E9=83=BD=E6=98=AF=201=20=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apps/jumpserver/api.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/apps/jumpserver/api.py b/apps/jumpserver/api.py index 6b5d162e5..2de0b7b07 100644 --- a/apps/jumpserver/api.py +++ b/apps/jumpserver/api.py @@ -186,15 +186,13 @@ class DatesLoginMetricMixin: return self.get_date_metrics(Session.objects, 'date_start', 'id') def get_dates_login_times_assets(self): - assets = self.sessions_queryset.values("asset") \ - .annotate(total=Count("asset")) \ + assets = self.sessions_queryset.values("asset").annotate(total=Count("asset")) \ .annotate(last=Cast(Max("date_start"), output_field=CharField())) \ .order_by("-total") return list(assets[:10]) def get_dates_login_times_users(self): - users = self.sessions_queryset.values("user_id") \ - .annotate(total=Count("user_id")) \ + users = self.sessions_queryset.values("user_id").annotate(total=Count("user_id")) \ .annotate(user=Max('user')) \ .annotate(last=Cast(Max("date_start"), output_field=CharField())) \ .order_by("-total") From d418647774ee70a50afe66ad893dcd732aa87cb2 Mon Sep 17 00:00:00 2001 From: Bai Date: Mon, 22 Apr 2024 19:36:26 +0800 Subject: [PATCH 14/14] =?UTF-8?q?fix:=20=E4=BF=AE=E5=A4=8D=E4=BB=AA?= =?UTF-8?q?=E8=A1=A8=E7=9B=98=E4=BC=9A=E8=AF=9D=E6=8E=92=E5=BA=8F=E6=95=B0?= =?UTF-8?q?=E9=87=8F=E9=83=BD=E6=98=AF=201=20=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- apps/jumpserver/api.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/apps/jumpserver/api.py b/apps/jumpserver/api.py index 2de0b7b07..6b5d162e5 100644 --- a/apps/jumpserver/api.py +++ b/apps/jumpserver/api.py @@ -186,13 +186,15 @@ class DatesLoginMetricMixin: return self.get_date_metrics(Session.objects, 'date_start', 'id') def get_dates_login_times_assets(self): - assets = self.sessions_queryset.values("asset").annotate(total=Count("asset")) \ + assets = self.sessions_queryset.values("asset") \ + .annotate(total=Count("asset")) \ .annotate(last=Cast(Max("date_start"), output_field=CharField())) \ .order_by("-total") return list(assets[:10]) def get_dates_login_times_users(self): - users = self.sessions_queryset.values("user_id").annotate(total=Count("user_id")) \ + users = self.sessions_queryset.values("user_id") \ + .annotate(total=Count("user_id")) \ .annotate(user=Max('user')) \ .annotate(last=Cast(Max("date_start"), output_field=CharField())) \ .order_by("-total")
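
A short note on the aggregation pattern these patches adjust (patch 06 drops distinct=True from Count(), patches 13/14 reshuffle the dashboard session query): after .values("field"), annotating with Count("field") counts rows per group, whereas Count("field", distinct=True) counts distinct values of the grouping field within each group, which is always 1, so ordering by that total stops ranking anything. A minimal illustrative sketch follows, assuming Session is the terminal session model used by the dashboard; the import path and names are assumptions for illustration, not taken from the patches.

    from django.db.models import Count
    from terminal.models import Session  # assumed import path

    # Group sessions by asset and rank assets by how often they were used.
    top_assets = (
        Session.objects
        .values("asset")                 # GROUP BY asset
        .annotate(total=Count("asset"))  # rows per group = real usage count
        .order_by("-total")[:10]
    )

    # With Count("asset", distinct=True) every group would report total == 1,
    # because each group holds exactly one distinct asset value, and the
    # "-total" ordering would no longer sort assets by usage.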