Merge branch 'dev' into dev

pull/12949/head
Bryan 2024-04-08 18:01:55 +08:00 committed by GitHub
commit 3456e9ac5b
28 changed files with 416 additions and 86 deletions

.gitignore

@@ -43,3 +43,4 @@ releashe
 data/*
 test.py
 .history/
+.test/


@@ -109,7 +109,15 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core-apt \
     && echo "no" | dpkg-reconfigure dash \
     && echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
     && sed -i "s@# export @export @g" ~/.bashrc \
-    && sed -i "s@# alias @alias @g" ~/.bashrc
+    && sed -i "s@# alias @alias @g" ~/.bashrc \
+
+ARG RECEPTOR_VERSION=v1.4.5
+RUN set -ex \
+    && wget -O /opt/receptor.tar.gz https://github.com/ansible/receptor/releases/download/${RECEPTOR_VERSION}/receptor_${RECEPTOR_VERSION/v/}_linux_${TARGETARCH}.tar.gz \
+    && tar -xf /opt/receptor.tar.gz -C /usr/local/bin/ \
+    && chown root:root /usr/local/bin/receptor \
+    && chmod 755 /usr/local/bin/receptor \
+    && rm -f /opt/receptor.tar.gz
 COPY --from=stage-2 /opt/py3 /opt/py3
 COPY --from=stage-1 /opt/jumpserver/release/jumpserver /opt/jumpserver


@@ -21,6 +21,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
     }
    filterset_fields = ['name', 'category', 'type']
    search_fields = ['name']
+    ordering = ['-internal', 'name']
    rbac_perms = {
        'categories': 'assets.view_platform',
        'type_constraints': 'assets.view_platform',


@@ -5,11 +5,13 @@ from django.conf import settings
 from django.contrib.auth import login
 from django.http.response import HttpResponseRedirect
 from rest_framework import serializers
+from rest_framework import status
 from rest_framework.decorators import action
 from rest_framework.permissions import AllowAny
 from rest_framework.request import Request
 from rest_framework.response import Response
 
+from authentication.errors import ACLError
 from common.api import JMSGenericViewSet
 from common.const.http import POST, GET
 from common.permissions import OnlySuperUser
@@ -17,7 +19,10 @@ from common.serializers import EmptySerializer
 from common.utils import reverse, safe_next_url
 from common.utils.timezone import utc_now
 from users.models import User
-from ..errors import SSOAuthClosed
+from users.utils import LoginBlockUtil, LoginIpBlockUtil
+from ..errors import (
+    SSOAuthClosed, AuthFailedError, LoginConfirmBaseError, SSOAuthKeyTTLError
+)
 from ..filters import AuthKeyQueryDeclaration
 from ..mixins import AuthMixin
 from ..models import SSOToken
@@ -63,31 +68,58 @@ class SSOViewSet(AuthMixin, JMSGenericViewSet):
         此接口违反了 `Restful` 的规范
         `GET` 应该是安全的方法但此接口是不安全的
         """
+        status_code = status.HTTP_400_BAD_REQUEST
         request.META['HTTP_X_JMS_LOGIN_TYPE'] = 'W'
         authkey = request.query_params.get(AUTH_KEY)
         next_url = request.query_params.get(NEXT_URL)
         if not next_url or not next_url.startswith('/'):
             next_url = reverse('index')
-        if not authkey:
-            raise serializers.ValidationError("authkey is required")
-        try:
+        try:
+            if not authkey:
+                raise serializers.ValidationError("authkey is required")
             authkey = UUID(authkey)
             token = SSOToken.objects.get(authkey=authkey, expired=False)
-            # 先过期,只能访问这一次
-            token.expired = True
-            token.save()
-        except (ValueError, SSOToken.DoesNotExist):
-            self.send_auth_signal(success=False, reason='authkey_invalid')
-            return HttpResponseRedirect(next_url)
-
-        # 判断是否过期
-        if (utc_now().timestamp() - token.date_created.timestamp()) > settings.AUTH_SSO_AUTHKEY_TTL:
-            self.send_auth_signal(success=False, reason='authkey_timeout')
-            return HttpResponseRedirect(next_url)
+        except (ValueError, SSOToken.DoesNotExist, serializers.ValidationError) as e:
+            error_msg = str(e)
+            self.send_auth_signal(success=False, reason=error_msg)
+            return Response({'error': error_msg}, status=status_code)
 
+        error_msg = None
         user = token.user
+        username = user.username
+        ip = self.get_request_ip()
+        try:
+            if (utc_now().timestamp() - token.date_created.timestamp()) > settings.AUTH_SSO_AUTHKEY_TTL:
+                raise SSOAuthKeyTTLError()
+            self._check_is_block(username, True)
+            self._check_only_allow_exists_user_auth(username)
+            self._check_login_acl(user, ip)
+            self.check_user_login_confirm_if_need(user)
+            self.request.session['auth_backend'] = settings.AUTH_BACKEND_SSO
-        login(self.request, user, settings.AUTH_BACKEND_SSO)
-        self.send_auth_signal(success=True, user=user)
-        return HttpResponseRedirect(next_url)
+            login(self.request, user, settings.AUTH_BACKEND_SSO)
+            self.send_auth_signal(success=True, user=user)
+            self.mark_mfa_ok('otp', user)
+            LoginIpBlockUtil(ip).clean_block_if_need()
+            LoginBlockUtil(username, ip).clean_failed_count()
+            self.clear_auth_mark()
+        except (ACLError, LoginConfirmBaseError):  # 无需记录日志
+            pass
+        except (AuthFailedError, SSOAuthKeyTTLError) as e:
+            error_msg = e.msg
+        except Exception as e:
+            error_msg = str(e)
+        finally:
+            token.expired = True
+            token.save()
+        if error_msg:
+            self.send_auth_signal(success=False, username=username, reason=error_msg)
+            return Response({'error': error_msg}, status=status_code)
+        else:
+            return HttpResponseRedirect(next_url)


@@ -52,6 +52,10 @@ class AuthFailedError(Exception):
         return str(self.msg)
 
 
+class SSOAuthKeyTTLError(Exception):
+    msg = 'sso_authkey_timeout'
+
+
 class BlockGlobalIpLoginError(AuthFailedError):
     error = 'block_global_ip_login'


@@ -363,7 +363,6 @@ class AuthACLMixin:
         if acl.is_action(acl.ActionChoices.notice):
             self.request.session['auth_notice_required'] = '1'
             self.request.session['auth_acl_id'] = str(acl.id)
-            return
 
     def _check_third_party_login_acl(self):
         request = self.request


@@ -17,6 +17,7 @@ class Services(TextChoices):
     web = 'web', 'web'
     celery = 'celery', 'celery'
     task = 'task', 'task'
+    receptor = 'receptor', 'receptor'
     all = 'all', 'all'
 
     @classmethod
@@ -27,7 +28,8 @@ class Services(TextChoices):
             cls.flower: services.FlowerService,
             cls.celery_default: services.CeleryDefaultService,
             cls.celery_ansible: services.CeleryAnsibleService,
-            cls.beat: services.BeatService
+            cls.beat: services.BeatService,
+            cls.receptor: services.ReceptorService
         }
         return services_map.get(name)
@@ -43,9 +45,13 @@ class Services(TextChoices):
     def task_services(cls):
         return cls.celery_services() + [cls.beat]
 
+    @classmethod
+    def receptor_services(cls):
+        return [cls.receptor]
+
     @classmethod
     def all_services(cls):
-        return cls.web_services() + cls.task_services()
+        return cls.web_services() + cls.task_services() + cls.receptor_services()
 
     @classmethod
     def export_services_values(cls):


@@ -3,3 +3,4 @@ from .celery_ansible import *
 from .celery_default import *
 from .flower import *
 from .gunicorn import *
+from .receptor import *


@@ -0,0 +1,32 @@
+from .base import BaseService
+from ..hands import *
+
+__all__ = ['ReceptorService']
+
+ANSIBLE_RUNNER_COMMAND = "ansible-runner"
+
+
+class ReceptorService(BaseService):
+
+    @property
+    def cmd(self):
+        print("\n- Start Receptor as Ansible Runner Sandbox")
+        cmd = [
+            'receptor',
+            '--local-only',
+            '--node', 'id=primary',
+            '--control-service',
+            'service=control',
+            'filename=/opt/jumpserver/data/share/control.sock',
+            '--work-command',
+            'worktype={}'.format(ANSIBLE_RUNNER_COMMAND),
+            'command={}'.format(ANSIBLE_RUNNER_COMMAND),
+            'params=worker',
+            'allowruntimeparams=true'
+        ]
+        return cmd
+
+    @property
+    def cwd(self):
+        return APPS_DIR
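
Note: the service above only assembles the receptor command line; the control socket it creates is what the new runner module later in this diff talks to. A minimal sketch for checking that a locally started receptor node is reachable, reusing the same receptorctl calls that appear in this PR (the socket path is the default added to the config below):

# Minimal sketch: ask a locally running receptor node for its status over the control socket.
# The socket path is the default from this PR's config change; adjust if configured differently.
from receptorctl import ReceptorControl

ctl = ReceptorControl('/opt/jumpserver/data/share/control.sock')
status = ctl.simple_command("status")
# The 'Advertisements' entry lists advertised work types, e.g. the ansible-runner worker.
print(status.get("Advertisements"))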


@@ -613,7 +613,11 @@ class Config(dict):
         'FILE_UPLOAD_SIZE_LIMIT_MB': 200,
 
-        'TICKET_APPLY_ASSET_SCOPE': 'all'
+        'TICKET_APPLY_ASSET_SCOPE': 'all',
+
+        # Ansible Receptor
+        'ANSIBLE_RECEPTOR_ENABLE': True,
+        'ANSIBLE_RECEPTOR_SOCK_PATH': '/opt/jumpserver/data/share/control.sock'
     }
 
     old_config_map = {


@@ -230,3 +230,7 @@ VIRTUAL_APP_ENABLED = CONFIG.VIRTUAL_APP_ENABLED
 FILE_UPLOAD_SIZE_LIMIT_MB = CONFIG.FILE_UPLOAD_SIZE_LIMIT_MB
 TICKET_APPLY_ASSET_SCOPE = CONFIG.TICKET_APPLY_ASSET_SCOPE
+
+# Ansible Receptor
+ANSIBLE_RECEPTOR_ENABLE = CONFIG.ANSIBLE_RECEPTOR_ENABLE
+ANSIBLE_RECEPTOR_SOCK_PATH = CONFIG.ANSIBLE_RECEPTOR_SOCK_PATH


@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0bd11124a56e5fa0b2b8433528d4ffd8c454e5f529bdd72fea15d1a62434165e
-size 176114
+oid sha256:488d95a4a96d38c3c0633f183334498d9e247bdf66ce3a4bcc836f80e8320432
+size 176705


@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2024-04-03 16:51+0800\n"
+"POT-Creation-Date: 2024-04-07 14:23+0800\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -1362,13 +1362,13 @@ msgstr "アプリケーション"
 msgid "Can match application"
 msgstr "アプリケーションを一致させることができます"
 
-#: assets/api/asset/asset.py:181
+#: assets/api/asset/asset.py:180
 msgid "Cannot create asset directly, you should create a host or other"
 msgstr ""
 "資産を直接作成することはできません。ホストまたはその他を作成する必要がありま"
 "す"
 
-#: assets/api/domain.py:68
+#: assets/api/domain.py:67
 msgid "Number required"
 msgstr "必要な数"
@@ -4137,11 +4137,24 @@ msgstr "タスクは存在しません"
 msgid "Task {} args or kwargs error"
 msgstr "タスク実行パラメータエラー"
 
-#: ops/api/job.py:146
+msgid ""
+"Asset ({asset}) must have at least one of the following protocols added: "
+"SSH, SFTP, or WinRM"
+msgstr ""
+"資産({asset})には、少なくともSSH、SFTP、WinRMのいずれか一つのプロトコルを追加す"
+"る必要があります"
+
+msgid "Asset ({asset}) authorization is missing SSH, SFTP, or WinRM protocol"
+msgstr "資産({asset})の認証にはSSH、SFTP、またはWinRMプロトコルが不足しています"
+
+msgid "Asset ({asset}) authorization lacks upload permissions"
+msgstr "資産({asset})の認証にはアップロード権限が不足しています"
+
+#: ops/api/job.py:168
 msgid "Duplicate file exists"
 msgstr "重複したファイルが存在する"
 
-#: ops/api/job.py:151
+#: ops/api/job.py:173
 #, python-brace-format
 msgid ""
 "File size exceeds maximum limit. Please select a file smaller than {limit}MB"
@@ -4149,7 +4162,7 @@ msgstr ""
 "ファイルサイズが最大制限を超えています。{limit}MB より小さいファイルを選択し"
 "てください。"
 
-#: ops/api/job.py:215
+#: ops/api/job.py:237
 msgid ""
 "The task is being created and cannot be interrupted. Please try again later."
 msgstr "タスクを作成中で、中断できません。後でもう一度お試しください。"
@@ -4537,18 +4550,18 @@ msgstr "ジョブのID"
 msgid "Name of the job"
 msgstr "ジョブの名前"
 
-#: orgs/api.py:62
+#: orgs/api.py:61
 msgid "The current organization ({}) cannot be deleted"
 msgstr "現在の組織 ({}) は削除できません"
 
-#: orgs/api.py:67
+#: orgs/api.py:66
 msgid ""
 "LDAP synchronization is set to the current organization. Please switch to "
 "another organization before deleting"
 msgstr ""
 "LDAP 同期は現在の組織に設定されます。削除する前に別の組織に切り替えてください"
 
-#: orgs/api.py:77
+#: orgs/api.py:76
 msgid "The organization have resource ({}) cannot be deleted"
 msgstr "組織のリソース ({}) は削除できません"
@@ -6531,11 +6544,11 @@ msgstr "これはエンタープライズ版アプレットです"
 msgid "Not found protocol query params"
 msgstr "プロトコルクエリパラメータが見つかりません"
 
-#: terminal/api/component/storage.py:30
+#: terminal/api/component/storage.py:31
 msgid "Deleting the default storage is not allowed"
 msgstr "デフォルトのストレージの削除は許可されていません"
 
-#: terminal/api/component/storage.py:33
+#: terminal/api/component/storage.py:34
 msgid "Cannot delete storage that is being used"
 msgstr "使用中のストレージを削除できません"
@@ -6547,15 +6560,15 @@ msgstr "コマンドストア"
 msgid "Invalid"
 msgstr "無効"
 
-#: terminal/api/component/storage.py:131 terminal/tasks.py:149
+#: terminal/api/component/storage.py:130 terminal/tasks.py:149
 msgid "Test failure: {}"
 msgstr "テスト失敗: {}"
 
-#: terminal/api/component/storage.py:134
+#: terminal/api/component/storage.py:133
 msgid "Test successful"
 msgstr "テスト成功"
 
-#: terminal/api/component/storage.py:136
+#: terminal/api/component/storage.py:135
 msgid "Test failure: Please check configuration"
 msgstr "テストに失敗しました:構成を確認してください"
@@ -7890,11 +7903,11 @@ msgstr "無効な承認アクション"
 msgid "This user is not authorized to approve this ticket"
 msgstr "このユーザーはこの作業指示を承認する権限がありません"
 
-#: users/api/user.py:137
+#: users/api/user.py:136
 msgid "Can not invite self"
 msgstr "自分自身を招待することはできません"
 
-#: users/api/user.py:190
+#: users/api/user.py:189
 msgid "Could not reset self otp, use profile reset instead"
 msgstr "自己otpをリセットできませんでした、代わりにプロファイルリセットを使用"
@@ -9311,6 +9324,3 @@ msgstr "エンタープライズプロフェッショナル版"
 #: xpack/plugins/license/models.py:86
 msgid "Ultimate edition"
 msgstr "エンタープライズ・フラッグシップ・エディション"
-
-#~ msgid "Reopen"
-#~ msgstr "再オープン"


@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:91ad10be95fda19937a09d07806d05f21057a1a79f40428350127d1162c7655d
-size 144168
+oid sha256:bd99d1b6018567413cefe5fe188a19019e09da46934c05ae9ce229943f712859
+size 144595


@@ -7,7 +7,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: JumpServer 0.3.3\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2024-04-03 16:51+0800\n"
+"POT-Creation-Date: 2024-04-07 14:23+0800\n"
 "PO-Revision-Date: 2021-05-20 10:54+0800\n"
 "Last-Translator: ibuler <ibuler@qq.com>\n"
 "Language-Team: JumpServer team<ibuler@qq.com>\n"
@@ -1355,11 +1355,11 @@ msgstr "應用程式"
 msgid "Can match application"
 msgstr "匹配應用"
 
-#: assets/api/asset/asset.py:181
+#: assets/api/asset/asset.py:180
 msgid "Cannot create asset directly, you should create a host or other"
 msgstr "不能直接創建資產, 你應該創建主機或其他資產"
 
-#: assets/api/domain.py:68
+#: assets/api/domain.py:67
 msgid "Number required"
 msgstr "需要為數字"
@@ -4087,17 +4087,28 @@ msgstr "任務 {} 不存在"
 msgid "Task {} args or kwargs error"
 msgstr "任務 {} 執行參數錯誤"
 
-#: ops/api/job.py:146
+msgid ""
+"Asset ({asset}) must have at least one of the following protocols added: "
+"SSH, SFTP, or WinRM"
+msgstr "资产({asset})至少要添加ssh,sftp,winrm其中一种协议"
+
+msgid "Asset ({asset}) authorization is missing SSH, SFTP, or WinRM protocol"
+msgstr "资产({asset})授权缺少ssh,sftp或winrm协议"
+
+msgid "Asset ({asset}) authorization lacks upload permissions"
+msgstr "资产({asset})授权缺少上传权限"
+
+#: ops/api/job.py:168
 msgid "Duplicate file exists"
 msgstr "存在同名文件"
 
-#: ops/api/job.py:151
+#: ops/api/job.py:173
 #, python-brace-format
 msgid ""
 "File size exceeds maximum limit. Please select a file smaller than {limit}MB"
 msgstr "檔案大小超過最大限制。請選擇小於 {limit}MB 的文件。"
 
-#: ops/api/job.py:215
+#: ops/api/job.py:237
 msgid ""
 "The task is being created and cannot be interrupted. Please try again later."
 msgstr "正在創建任務,無法中斷,請稍後重試。"
@@ -4485,17 +4496,17 @@ msgstr "Job ID"
 msgid "Name of the job"
 msgstr "Job 名稱"
 
-#: orgs/api.py:62
+#: orgs/api.py:61
 msgid "The current organization ({}) cannot be deleted"
 msgstr "當前組織 ({}) 不能被刪除"
 
-#: orgs/api.py:67
+#: orgs/api.py:66
 msgid ""
 "LDAP synchronization is set to the current organization. Please switch to "
 "another organization before deleting"
 msgstr "LDAP 同步設定組織為當前組織,請切換其他組織後再進行刪除操作"
 
-#: orgs/api.py:77
+#: orgs/api.py:76
 msgid "The organization have resource ({}) cannot be deleted"
 msgstr "組織存在資源 ({}) 不能被刪除"
@@ -6436,11 +6447,11 @@ msgstr "企業版遠程應用,在社區版中不能使用"
 msgid "Not found protocol query params"
 msgstr "未發現 protocol 查詢參數"
 
-#: terminal/api/component/storage.py:30
+#: terminal/api/component/storage.py:31
 msgid "Deleting the default storage is not allowed"
 msgstr "不允許刪除默認儲存配置"
 
-#: terminal/api/component/storage.py:33
+#: terminal/api/component/storage.py:34
 msgid "Cannot delete storage that is being used"
 msgstr "不允許刪除正在使用的儲存配置"
@@ -6452,15 +6463,15 @@ msgstr "命令儲存"
 msgid "Invalid"
 msgstr "無效"
 
-#: terminal/api/component/storage.py:131 terminal/tasks.py:149
+#: terminal/api/component/storage.py:130 terminal/tasks.py:149
 msgid "Test failure: {}"
 msgstr "測試失敗: {}"
 
-#: terminal/api/component/storage.py:134
+#: terminal/api/component/storage.py:133
 msgid "Test successful"
 msgstr "測試成功"
 
-#: terminal/api/component/storage.py:136
+#: terminal/api/component/storage.py:135
 msgid "Test failure: Please check configuration"
 msgstr "測試失敗:請檢查配置"
@@ -7782,11 +7793,11 @@ msgstr "無效的審批動作"
 msgid "This user is not authorized to approve this ticket"
 msgstr "此用戶無權審批此工單"
 
-#: users/api/user.py:137
+#: users/api/user.py:136
 msgid "Can not invite self"
 msgstr "不能邀請自己"
 
-#: users/api/user.py:190
+#: users/api/user.py:189
 msgid "Could not reset self otp, use profile reset instead"
 msgstr "不能在該頁面重設 MFA 多因子認證, 請去個人資訊頁面重設"
@@ -9183,6 +9194,3 @@ msgstr "企業專業版"
 #: xpack/plugins/license/models.py:86
 msgid "Ultimate edition"
 msgstr "企業旗艦版"
-
-#~ msgid "Reopen"
-#~ msgstr "重新打開"


@@ -0,0 +1,89 @@
+import concurrent.futures
+import queue
+import socket
+
+import ansible_runner
+from receptorctl import ReceptorControl
+
+receptor_ctl = ReceptorControl('control.sock')
+
+
+def init_receptor_ctl(sock_path):
+    global receptor_ctl
+    receptor_ctl = ReceptorControl(sock_path)
+
+
+def nodes():
+    return receptor_ctl.simple_command("status").get("Advertisements", None)
+
+
+def run(**kwargs):
+    receptor_runner = AnsibleReceptorRunner(**kwargs)
+    return receptor_runner.run()
+
+
+class AnsibleReceptorRunner:
+    def __init__(self, **kwargs):
+        self.runner_params = kwargs
+        self.unit_id = None
+
+    def run(self):
+        input, output = socket.socketpair()
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+            transmitter_future = executor.submit(self.transmit, input)
+            result = receptor_ctl.submit_work(payload=output.makefile('rb'),
+                                              node='primary', worktype='ansible-runner')
+            input.close()
+            output.close()
+
+            self.unit_id = result['unitid']
+            transmitter_future.result()
+
+        result_file = receptor_ctl.get_work_results(self.unit_id, return_sockfile=True)
+
+        stdout_queue = queue.Queue()
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
+            processor_future = executor.submit(self.processor, result_file, stdout_queue)
+            while not processor_future.done() or \
+                    not stdout_queue.empty():
+                msg = stdout_queue.get()
+                if msg is None:
+                    break
+                print(msg)
+
+        return processor_future.result()
+
+    def transmit(self, _socket):
+        try:
+            ansible_runner.run(
+                streamer='transmit',
+                _output=_socket.makefile('wb'),
+                **self.runner_params
+            )
+        finally:
+            _socket.shutdown(socket.SHUT_WR)
+
+    def processor(self, _result_file, stdout_queue):
+        try:
+            original_handler = self.runner_params.pop("event_handler", None)
+
+            def event_handler(data, **kwargs):
+                stdout = data.get('stdout', '')
+                if stdout:
+                    stdout_queue.put(stdout)
+                if original_handler:
+                    original_handler(data, **kwargs)
+
+            return ansible_runner.interface.run(
+                quite=True,
+                streamer='process',
+                _input=_result_file,
+                event_handler=event_handler,
+                **self.runner_params,
+            )
+        finally:
+            stdout_queue.put(None)
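
Note: `run(**kwargs)` above is meant as a drop-in for `ansible_runner.run`: it transmits the runner payload to the receptor worker, streams events back, and returns the usual runner result object. A minimal usage sketch; the import path, inventory, and module values are illustrative assumptions, not taken from this diff:

# Hypothetical ad-hoc call through the receptor-backed runner defined above.
# Keyword arguments mirror ansible_runner.run(); the values here are examples only.
from ops.ansible.receptor import receptor_runner  # assumed module path

receptor_runner.init_receptor_ctl('/opt/jumpserver/data/share/control.sock')
result = receptor_runner.run(
    private_data_dir='/tmp/demo',
    host_pattern='localhost',
    inventory='localhost ansible_connection=local',
    module='ping',
)
print(result.status, result.rc)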


@@ -1,3 +1,4 @@
+import logging
 import os
 import shutil
 import uuid
@@ -5,15 +6,35 @@ import uuid
 import ansible_runner
 from django.conf import settings
 from django.utils._os import safe_join
+from django.utils.functional import LazyObject
 
 from .callback import DefaultCallback
+from .receptor import receptor_runner
 from ..utils import get_ansible_log_verbosity
 
+logger = logging.getLogger(__file__)
+
 
 class CommandInBlackListException(Exception):
     pass
 
 
+class AnsibleWrappedRunner(LazyObject):
+    def _setup(self):
+        self._wrapped = self.get_runner()
+
+    @staticmethod
+    def get_runner():
+        if settings.ANSIBLE_RECEPTOR_ENABLE and settings.ANSIBLE_RECEPTOR_SOCK_PATH:
+            logger.info("Ansible receptor enabled, run ansible task via receptor")
+            receptor_runner.init_receptor_ctl(settings.ANSIBLE_RECEPTOR_SOCK_PATH)
+            return receptor_runner
+        return ansible_runner
+
+
+runner = AnsibleWrappedRunner()
+
+
 class AdHocRunner:
     cmd_modules_choices = ('shell', 'raw', 'command', 'script', 'win_shell')
@@ -30,6 +51,8 @@ class AdHocRunner:
         self.extra_vars = extra_vars
         self.dry_run = dry_run
         self.timeout = timeout
+        # enable local connection
+        self.extra_vars.update({"LOCAL_CONNECTION_ENABLED": "1"})
 
     def check_module(self):
         if self.module not in self.cmd_modules_choices:
@@ -48,7 +71,7 @@ class AdHocRunner:
         if os.path.exists(private_env):
             shutil.rmtree(private_env)
 
-        ansible_runner.run(
+        runner.run(
             timeout=self.timeout if self.timeout > 0 else None,
             extravars=self.extra_vars,
             host_pattern=self.pattern,
@@ -81,7 +104,7 @@ class PlaybookRunner:
         if os.path.exists(private_env):
             shutil.rmtree(private_env)
 
-        ansible_runner.run(
+        runner.run(
             private_data_dir=self.project_dir,
             inventory=self.inventory,
             playbook=self.playbook,
@@ -112,7 +135,7 @@ class UploadFileRunner:
     def run(self, verbosity=0, **kwargs):
         verbosity = get_ansible_log_verbosity(verbosity)
 
-        ansible_runner.run(
+        runner.run(
             host_pattern="*",
             inventory=self.inventory,
             module='copy',
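
Note: `AnsibleWrappedRunner` defers the receptor-vs-ansible_runner decision until the first attribute access, so importing this module stays cheap and settings are only read when a task actually runs. A self-contained sketch of the `LazyObject` pattern it relies on:

# Self-contained illustration of django.utils.functional.LazyObject as used above:
# _setup() runs on first attribute access and installs the real backend in self._wrapped.
from django.utils.functional import LazyObject


class LazyBackend(LazyObject):
    def _setup(self):
        # Stand-in for choosing receptor_runner or ansible_runner based on settings.
        self._wrapped = "ansible_runner"


backend = LazyBackend()
print(backend.upper())  # _setup() fires here, then the call is delegated to the wrapped object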


@@ -32,6 +32,9 @@ from ops.variables import JMS_JOB_VARIABLE_HELP
 from orgs.mixins.api import OrgBulkModelViewSet
 from orgs.utils import tmp_to_org, get_current_org
 from accounts.models import Account
+from assets.const import Protocol
+from perms.const import ActionChoices
+from perms.utils.asset_perm import PermAssetDetailUtil
 from perms.models import PermNode
 from perms.utils import UserPermAssetUtil
 from jumpserver.settings import get_file_md5
@@ -72,6 +75,22 @@ class JobViewSet(OrgBulkModelViewSet):
             return self.permission_denied(request, "Command execution disabled")
         return super().check_permissions(request)
 
+    def check_upload_permission(self, assets, account_name):
+        protocols_required = {Protocol.ssh, Protocol.sftp, Protocol.winrm}
+        error_msg_missing_protocol = _(
+            "Asset ({asset}) must have at least one of the following protocols added: SSH, SFTP, or WinRM")
+        error_msg_auth_missing_protocol = _("Asset ({asset}) authorization is missing SSH, SFTP, or WinRM protocol")
+        error_msg_auth_missing_upload = _("Asset ({asset}) authorization lacks upload permissions")
+        for asset in assets:
+            protocols = asset.protocols.values_list("name", flat=True)
+            if not set(protocols).intersection(protocols_required):
+                self.permission_denied(self.request, error_msg_missing_protocol.format(asset=asset.name))
+            util = PermAssetDetailUtil(self.request.user, asset)
+            if not util.check_perm_protocols(protocols_required):
+                self.permission_denied(self.request, error_msg_auth_missing_protocol.format(asset=asset.name))
+            if not util.check_perm_actions(account_name, [ActionChoices.upload.value]):
+                self.permission_denied(self.request, error_msg_auth_missing_upload.format(asset=asset.name))
+
     def get_queryset(self):
         queryset = super().get_queryset()
         queryset = queryset \
@@ -89,6 +108,9 @@ class JobViewSet(OrgBulkModelViewSet):
         assets = serializer.validated_data.get('assets')
         assets = merge_nodes_and_assets(node_ids, assets, self.request.user)
         serializer.validated_data['assets'] = assets
+        if serializer.validated_data.get('type') == Types.upload_file:
+            account_name = serializer.validated_data.get('runas')
+            self.check_upload_permission(assets, account_name)
         instance = serializer.save()
         if instance.instant or run_after_save:


@@ -42,6 +42,10 @@ class ActionChoices(BitChoices):
     def contains(cls, total, action_value):
         return action_value & total == action_value
 
+    @classmethod
+    def contains_all(cls, total, action_values):
+        return all(cls.contains(total, action) for action in action_values)
+
     @classmethod
     def display(cls, value):
         return ', '.join([str(c.label) for c in cls if c.value & value == c.value])
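
Note: `contains` / `contains_all` are plain bitmask checks: `total` is the OR of all granted action bits and each action is a single bit. A small worked sketch with made-up bit values (the real bits come from `ActionChoices` and are not shown in this diff):

# Worked example of the bit-containment logic above, using hypothetical action bits.
CONNECT, UPLOAD, DOWNLOAD = 0b001, 0b010, 0b100

def contains(total, action_value):
    return action_value & total == action_value

def contains_all(total, action_values):
    return all(contains(total, action) for action in action_values)

granted = CONNECT | UPLOAD                        # what the permission grants
print(contains(granted, UPLOAD))                  # True
print(contains_all(granted, [UPLOAD, DOWNLOAD]))  # False: the download bit is not granted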


@@ -6,6 +6,7 @@ from assets.models import Asset
 from common.utils import lazyproperty
 from orgs.utils import tmp_to_org, tmp_to_root_org
 from .permission import AssetPermissionUtil
+from perms.const import ActionChoices
 
 __all__ = ['PermAssetDetailUtil']
@@ -137,3 +138,23 @@ class PermAssetDetailUtil:
             account.date_expired = max(cleaned_accounts_expired[account])
             accounts.append(account)
         return accounts
+
+    def check_perm_protocols(self, protocols):
+        """
+        检查用户是否有某些协议权限
+        :param protocols: set
+        """
+        perms_protocols = self.get_permed_protocols_for_user(only_name=True)
+        if "all" in perms_protocols:
+            return True
+        return protocols.intersection(perms_protocols)
+
+    def check_perm_actions(self, account_name, actions):
+        """
+        检查用户是否有某个账号的某个资产操作权限
+        :param account_name: str
+        :param actions: list
+        """
+        perms = self.user_asset_perms
+        action_bit_mapper, __ = self.parse_alias_action_date_expire(perms, self.asset)
+        return ActionChoices.contains_all(action_bit_mapper.get(account_name, 0), actions)


@@ -56,6 +56,7 @@ class RoleBinding(JMSBaseModel):
         on_delete=models.CASCADE, verbose_name=_('Organization')
     )
     objects = RoleBindingManager()
+    objects_raw = models.Manager()
 
     class Meta:
         verbose_name = _('Role binding')


@@ -22,8 +22,7 @@ from ..models import User
 from ..notifications import ResetMFAMsg
 from ..permissions import UserObjectPermission
 from ..serializers import (
-    UserSerializer,
-    MiniUserSerializer, InviteSerializer
+    UserSerializer, MiniUserSerializer, InviteSerializer, UserRetrieveSerializer
 )
 from ..signals import post_user_create
@@ -43,6 +42,7 @@ class UserViewSet(CommonApiMixin, UserQuerysetMixin, SuggestionMixin, BulkModelV
         'default': UserSerializer,
         'suggestion': MiniUserSerializer,
         'invite': InviteSerializer,
+        'retrieve': UserRetrieveSerializer,
     }
     rbac_perms = {
         'match': 'users.match_user',


@@ -5,6 +5,7 @@ import base64
 import datetime
 import uuid
 from typing import Callable
+from collections import defaultdict
 
 import sshpubkeys
 from django.conf import settings
@@ -27,6 +28,7 @@ from common.utils import (
 from labels.mixins import LabeledMixin
 from orgs.utils import current_org
 from rbac.const import Scope
+from rbac.models import RoleBinding
 from ..signals import (
     post_user_change_password, post_user_leave_org, pre_user_leave_org
 )
@@ -926,6 +928,14 @@ class User(AuthMixin, TokenMixin, RoleMixin, MFAMixin, LabeledMixin, JSONFilterM
     def is_local(self):
         return self.source == self.Source.local.value
 
+    @property
+    def orgs_roles(self):
+        orgs_roles = defaultdict(set)
+        rbs = RoleBinding.objects_raw.filter(user=self, scope='org').prefetch_related('role', 'org')
+        for rb in rbs:
+            orgs_roles[rb.org_name].add(str(rb.role.display_name))
+        return orgs_roles
+
     def is_password_authenticate(self):
         cas = self.Source.cas
         saml2 = self.Source.saml2
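
Note: `orgs_roles` walks the raw (cross-org) role bindings exposed by the `objects_raw` manager added to `RoleBinding` above and groups role display names per organization. A tiny sketch of the resulting shape, with illustrative organization and role names:

# Sketch of the structure produced by the orgs_roles property above
# (organization and role names are made up for illustration).
from collections import defaultdict

orgs_roles = defaultdict(set)
for org_name, role_display in [("Default", "OrgAdmin"), ("Dev", "OrgUser"), ("Dev", "OrgAuditor")]:
    orgs_roles[org_name].add(role_display)

print(dict(orgs_roles))  # {'Default': {'OrgAdmin'}, 'Dev': {'OrgUser', 'OrgAuditor'}}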


@@ -12,6 +12,7 @@ from common.serializers.fields import (
 )
 from common.utils import pretty_string, get_logger
 from common.validators import PhoneValidator
+from orgs.utils import current_org
 from rbac.builtin import BuiltinRole
 from rbac.models import OrgRoleBinding, SystemRoleBinding, Role
 from rbac.permissions import RBACPermission
@@ -23,6 +24,7 @@ __all__ = [
     "MiniUserSerializer",
     "InviteSerializer",
     "ServiceAccountSerializer",
+    "UserRetrieveSerializer",
 ]
 
 logger = get_logger(__file__)
@@ -46,6 +48,7 @@ class RolesSerializerMixin(serializers.Serializer):
         label=_("Org roles"), many=True, required=False,
         default=default_org_roles
     )
+    orgs_roles = serializers.JSONField(read_only=True, label=_("Organization and roles relations"))
 
     def pop_roles_if_need(self, fields):
         request = self.context.get("request")
@@ -58,7 +61,7 @@ class RolesSerializerMixin(serializers.Serializer):
         model_cls_field_mapper = {
             SystemRoleBinding: ["system_roles"],
-            OrgRoleBinding: ["org_roles"],
+            OrgRoleBinding: ["org_roles", "orgs_roles"],
         }
 
         update_actions = ("partial_bulk_update", "bulk_update", "partial_update", "update")
@@ -156,6 +159,7 @@ class UserSerializer(RolesSerializerMixin, CommonBulkSerializerMixin, ResourceLa
             "is_first_login", "wecom_id", "dingtalk_id",
             "feishu_id", "lark_id", "date_api_key_last_used",
         ]
+        fields_only_root_org = ["orgs_roles"]
         disallow_self_update_fields = ["is_active", "system_roles", "org_roles"]
         extra_kwargs = {
             "password": {
@@ -178,6 +182,17 @@ class UserSerializer(RolesSerializerMixin, CommonBulkSerializerMixin, ResourceLa
             'mfa_level': {'label': _("MFA level")},
         }
 
+    def get_fields(self):
+        fields = super().get_fields()
+        self.pop_fields_if_need(fields)
+        return fields
+
+    def pop_fields_if_need(self, fields):
+        # pop only root org fields
+        if not current_org.is_root():
+            for f in self.Meta.fields_only_root_org:
+                fields.pop(f, None)
+
     def validate_password(self, password):
         password_strategy = self.initial_data.get("password_strategy")
         if self.instance is None and password_strategy != PasswordStrategy.custom:
@@ -273,7 +288,7 @@ class UserRetrieveSerializer(UserSerializer):
     )
 
     class Meta(UserSerializer.Meta):
-        fields = UserSerializer.Meta.fields + ["login_confirm_settings"]
+        fields = UserSerializer.Meta.fields + ["login_confirm_settings", "orgs_roles"]
 
 
 class MiniUserSerializer(serializers.ModelSerializer):

jms

@@ -188,7 +188,7 @@ if __name__ == '__main__':
     )
     parser.add_argument(
         "services", type=str, default='all', nargs="*",
-        choices=("all", "web", "task"),
+        choices=("all", "web", "task", "receptor"),
         help="The service to start",
     )
     parser.add_argument('-d', '--daemon', nargs="?", const=True)

poetry.lock

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
 
 [[package]]
 name = "adal"
@@ -2836,14 +2836,8 @@ files = [
 
 [package.dependencies]
 google-auth = ">=2.14.1,<3.0.dev0"
 googleapis-common-protos = ">=1.56.2,<2.0.dev0"
-grpcio = [
-    {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""},
-    {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-]
-grpcio-status = [
-    {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""},
-    {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
-]
+grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
+grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
 protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
 requests = ">=2.18.0,<3.0.0.dev0"
@@ -4172,6 +4166,7 @@ files = [
     {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"},
     {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"},
     {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"},
+    {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"},
     {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"},
 ]
@@ -5814,11 +5809,9 @@ files = [
     {file = "pymssql-2.2.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:049f2e3de919e8e02504780a21ebbf235e21ca8ed5c7538c5b6e705aa6c43d8c"},
     {file = "pymssql-2.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd86d8e3e346e34f3f03d12e333747b53a1daa74374a727f4714d5b82ee0dd5"},
     {file = "pymssql-2.2.8-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:508226a0df7cb6faeda9f8e84e85743690ca427d7b27af9a73d75fcf0c1eef6e"},
-    {file = "pymssql-2.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:47859887adeaf184766b5e0bc845dd23611f3808f9521552063bb36eabc10092"},
     {file = "pymssql-2.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d873e553374d5b1c57fe1c43bb75e3bcc2920678db1ef26f6bfed396c7d21b30"},
     {file = "pymssql-2.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf31b8b76634c826a91f9999e15b7bfb0c051a0f53b319fd56481a67e5b903bb"},
     {file = "pymssql-2.2.8-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:821945c2214fe666fd456c61e09a29a00e7719c9e136c801bffb3a254e9c579b"},
-    {file = "pymssql-2.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:cc85b609b4e60eac25fa38bbac1ff854fd2c2a276e0ca4a3614c6f97efb644bb"},
     {file = "pymssql-2.2.8-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ebe7f64d5278d807f14bea08951e02512bfbc6219fd4d4f15bb45ded885cf3d4"},
     {file = "pymssql-2.2.8-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:253af3d39fc0235627966817262d5c4c94ad09dcbea59664748063470048c29c"},
     {file = "pymssql-2.2.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c9d109df536dc5f7dd851a88d285a4c9cb12a9314b621625f4f5ab1197eb312"},
@@ -5834,13 +5827,11 @@ files = [
     {file = "pymssql-2.2.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3906993300650844ec140aa58772c0f5f3e9e9d5709c061334fd1551acdcf066"},
     {file = "pymssql-2.2.8-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7309c7352e4a87c9995c3183ebfe0ff4135e955bb759109637673c61c9f0ca8d"},
     {file = "pymssql-2.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b8d603cc1ec7ae585c5a409a1d45e8da067970c79dd550d45c238ae0aa0f79f"},
-    {file = "pymssql-2.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:293cb4d0339e221d877d6b19a1905082b658f0100a1e2ccc9dda10de58938901"},
     {file = "pymssql-2.2.8-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:895041edd002a2e91d8a4faf0906b6fbfef29d9164bc6beb398421f5927fa40e"},
     {file = "pymssql-2.2.8-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6b2d9c6d38a416c6f2db36ff1cd8e69f9a5387a46f9f4f612623192e0c9404b1"},
     {file = "pymssql-2.2.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63d6f25cf40fe6a03c49be2d4d337858362b8ab944d6684c268e4990807cf0c"},
     {file = "pymssql-2.2.8-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c83ad3ad20951f3a94894b354fa5fa9666dcd5ebb4a635dad507c7d1dd545833"},
     {file = "pymssql-2.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3933f7f082be74698eea835df51798dab9bc727d94d3d280bffc75ab9265f890"},
-    {file = "pymssql-2.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:de313375b90b0f554058992f35c4a4beb3f6ec2f5912d8cd6afb649f95b03a9f"},
     {file = "pymssql-2.2.8.tar.gz", hash = "sha256:9baefbfbd07d0142756e2dfcaa804154361ac5806ab9381350aad4e780c3033e"},
 ]
@@ -6409,6 +6400,27 @@ type = "legacy"
 url = "https://pypi.tuna.tsinghua.edu.cn/simple"
 reference = "tsinghua"
 
+[[package]]
+name = "receptorctl"
+version = "1.4.5"
+description = "\"Receptorctl is a front-end CLI and importable Python library that interacts with Receptor over its control socket interface.\""
+optional = false
+python-versions = "*"
+files = [
+    {file = "receptorctl-1.4.5-py3-none-any.whl", hash = "sha256:e12a6b6f703c1bc7ec13bbf46adf1c3c0e5785af4136fc776fbc68b349a6dc8c"},
+    {file = "receptorctl-1.4.5.tar.gz", hash = "sha256:d1765a1d68e82d101d500385be8830c647c14dba783c5c01a915015dc8484a30"},
+]
+
+[package.dependencies]
+click = "*"
+python-dateutil = "*"
+pyyaml = "*"
+
+[package.source]
+type = "legacy"
+url = "https://pypi.tuna.tsinghua.edu.cn/simple"
+reference = "tsinghua"
+
 [[package]]
 name = "redis"
 version = "5.0.3"
@@ -7418,6 +7430,27 @@ type = "legacy"
 url = "https://pypi.tuna.tsinghua.edu.cn/simple"
 reference = "tsinghua"
 
+[[package]]
+name = "volcengine-python-sdk"
+version = "1.0.71"
+description = "Volcengine SDK for Python"
+optional = false
+python-versions = "*"
+files = [
+    {file = "volcengine-python-sdk-1.0.71.tar.gz", hash = "sha256:2f9addb68dfebd9c0c79551599eaf3a45957499d8975692d80901b6f89f5d751"},
+]
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+python-dateutil = ">=2.1"
+six = ">=1.10"
+urllib3 = ">=1.23"
+
+[package.source]
+type = "legacy"
+url = "https://pypi.tuna.tsinghua.edu.cn/simple"
+reference = "tsinghua"
+
 [[package]]
 name = "wcwidth"
 version = "0.2.13"
@@ -7876,4 +7909,4 @@ reference = "tsinghua"
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "fb0541ac9e68b6395b1b151dda57caf4e05d45ca072ae2fec659ad0886cf002d"
+content-hash = "1a8e1ea4acc0bfded274acb3b0faa65693a067bf280affaa195fe5cfb970777a"


@@ -149,6 +149,7 @@ xlsxwriter = "^3.1.9"
 exchangelib = "^5.1.0"
 xmlsec = "^1.3.13"
 lxml = "4.9.3"
+receptorctl = "^1.4.5"
 
 
 [tool.poetry.group.xpack.dependencies]
@@ -173,6 +174,7 @@ psycopg2 = "2.9.6"
 ucloud-sdk-python3 = "0.11.50"
 huaweicloudsdkecs = "3.1.52"
 huaweicloudsdkcore = "3.1.52"
+volcengine-python-sdk = "1.0.71"
 
 [[tool.poetry.source]]
 name = "tsinghua"