mirror of https://github.com/jumpserver/jumpserver
Merge branch 'dev' of github.com:jumpserver/jumpserver into pr@dev@perf_custom_change_pwd
commit 3d1c5411f9
Dockerfile

@@ -1,4 +1,4 @@
-FROM jumpserver/core-base:20240924_031841 AS stage-build
+FROM jumpserver/core-base:20241022_070738 AS stage-build
 
 ARG VERSION
 
@@ -24,30 +24,27 @@ ENV LANG=en_US.UTF-8 \
     PATH=/opt/py3/bin:$PATH
 
 ARG DEPENDENCIES=" \
-    libldap2-dev \
     libx11-dev"
 
 ARG TOOLS=" \
     ca-certificates \
-    default-libmysqlclient-dev \
     openssh-client \
     sshpass \
     bubblewrap"
 
 ARG APT_MIRROR=http://deb.debian.org
 
 RUN set -ex \
-    && rm -f /etc/apt/apt.conf.d/docker-clean \
     && sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
     && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
     && apt-get update > /dev/null \
     && apt-get -y install --no-install-recommends ${DEPENDENCIES} \
     && apt-get -y install --no-install-recommends ${TOOLS} \
-    && apt-get clean \
     && mkdir -p /root/.ssh/ \
     && echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null\n\tCiphers +aes128-cbc\n\tKexAlgorithms +diffie-hellman-group1-sha1\n\tHostKeyAlgorithms +ssh-rsa" > /root/.ssh/config \
     && echo "no" | dpkg-reconfigure dash \
-    && sed -i "s@# export @export @g" ~/.bashrc \
-    && sed -i "s@# alias @alias @g" ~/.bashrc
+    && apt-get clean all \
+    && rm -rf /var/lib/apt/lists/*
 
 COPY --from=stage-build /opt /opt
 COPY --from=stage-build /usr/local/bin /usr/local/bin
@@ -5,18 +5,10 @@ ARG TARGETARCH
 ARG DEPENDENCIES=" \
     ca-certificates \
     wget \
-    g++ \
-    make \
-    pkg-config \
-    default-libmysqlclient-dev \
-    freetds-dev \
-    gettext \
-    libkrb5-dev \
-    libldap2-dev \
-    libsasl2-dev"
+    gettext"
 
 ARG APT_MIRROR=http://deb.debian.org
 
 RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
     --mount=type=cache,target=/var/lib/apt,sharing=locked,id=core \
     set -ex \
@@ -27,9 +19,8 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
     && apt-get -y install --no-install-recommends ${DEPENDENCIES} \
     && echo "no" | dpkg-reconfigure dash
 
 
 # Install bin tools
-ARG CHECK_VERSION=v1.0.3
+ARG CHECK_VERSION=v1.0.4
 RUN set -ex \
     && wget https://github.com/jumpserver-dev/healthcheck/releases/download/${CHECK_VERSION}/check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
     && tar -xf check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz \
@@ -38,14 +29,13 @@ RUN set -ex \
     && chmod 755 /usr/local/bin/check \
     && rm -f check-${CHECK_VERSION}-linux-${TARGETARCH}.tar.gz
 
 
 # Install Python dependencies
 WORKDIR /opt/jumpserver
 
 ARG PIP_MIRROR=https://pypi.org/simple
 ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
 
-RUN --mount=type=cache,target=/root/.cache,sharing=locked,id=core \
+RUN --mount=type=cache,target=/root/.cache \
     --mount=type=bind,source=poetry.lock,target=poetry.lock \
     --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
     --mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
@@ -15,14 +15,11 @@ ARG TOOLS=" \
     vim \
     wget"
 
-ARG APT_MIRROR=http://deb.debian.org
 RUN set -ex \
-    && rm -f /etc/apt/apt.conf.d/docker-clean \
-    && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \
-    && sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
     && apt-get update \
     && apt-get -y install --no-install-recommends ${TOOLS} \
-    && echo "no" | dpkg-reconfigure dash
+    && apt-get clean all \
+    && rm -rf /var/lib/apt/lists/*
 
 WORKDIR /opt/jumpserver
 
@@ -13,20 +13,17 @@ from authentication.const import ConfirmType
 from authentication.mixins import AuthMixin
 from authentication.permissions import UserConfirmation
 from common.sdk.im.wecom import URL
-from common.sdk.im.wecom import WeCom
+from common.sdk.im.wecom import WeCom, wecom_tool
 from common.utils import get_logger
-from common.utils.common import get_request_ip
 from common.utils.django import reverse, get_object_or_none, safe_next_url
-from common.utils.random import random_string
 from common.views.mixins import UserConfirmRequiredExceptionMixin, PermissionsMixin
 from users.models import User
 from users.views import UserVerifyPasswordView
 from .base import BaseLoginCallbackView, BaseBindCallbackView
 from .mixins import METAMixin, FlashMessageMixin
 
 logger = get_logger(__file__)
 
-WECOM_STATE_SESSION_KEY = '_wecom_state'
 
 
 class WeComBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, FlashMessageMixin, View):
@@ -45,7 +42,7 @@ class WeComBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, FlashMessageMixin, View):
     )
 
     def verify_state(self):
-        return self.verify_state_with_session_key(WECOM_STATE_SESSION_KEY)
+        return wecom_tool.check_state(self.request.GET.get('state'), self.request)
 
     def get_already_bound_response(self, redirect_url):
         msg = _('WeCom is already bound')
@@ -56,13 +53,10 @@ class WeComBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, FlashMessageMixin, View):
 class WeComQRMixin(WeComBaseMixin, View):
 
     def get_qr_url(self, redirect_uri):
-        state = random_string(16)
-        self.request.session[WECOM_STATE_SESSION_KEY] = state
-
         params = {
             'appid': settings.WECOM_CORPID,
             'agentid': settings.WECOM_AGENTID,
-            'state': state,
+            'state': wecom_tool.gen_state(request=self.request),
             'redirect_uri': redirect_uri,
         }
         url = URL.QR_CONNECT + '?' + urlencode(params)
@@ -74,13 +68,11 @@ class WeComOAuthMixin(WeComBaseMixin, View):
     def get_oauth_url(self, redirect_uri):
         if not settings.AUTH_WECOM:
             return reverse('authentication:login')
-        state = random_string(16)
-        self.request.session[WECOM_STATE_SESSION_KEY] = state
-
         params = {
             'appid': settings.WECOM_CORPID,
             'agentid': settings.WECOM_AGENTID,
-            'state': state,
+            'state': wecom_tool.gen_state(request=self.request),
             'redirect_uri': redirect_uri,
             'response_type': 'code',
             'scope': 'snsapi_base',
@@ -16,12 +16,6 @@ def digest(corp_id, corp_secret):
     return dist
 
 
-def update_values(default: dict, others: dict):
-    for key in default.keys():
-        if key in others:
-            default[key] = others[key]
-
-
 def set_default(data: dict, default: dict):
     for key in default.keys():
         if key not in data:
@@ -1,12 +1,14 @@
 from typing import Iterable, AnyStr
+from urllib.parse import urlencode
 
 from django.conf import settings
+from django.core.cache import cache
 from django.utils.translation import gettext_lazy as _
 from rest_framework.exceptions import APIException
 
 from common.sdk.im.mixin import RequestMixin, BaseRequest
-from common.sdk.im.utils import digest, update_values
-from common.utils.common import get_logger
+from common.sdk.im.utils import digest
+from common.utils import reverse, random_string, get_logger, lazyproperty
 from users.utils import construct_user_email, flatten_dict, map_attributes
 
 logger = get_logger(__name__)
@@ -107,15 +109,6 @@ class WeCom(RequestMixin):
         For business code, only errors caused by a bad user id or a malformed message need handling; other errors can be ignored.
         """
         users = tuple(users)
 
-        extra_params = {
-            "safe": 0,
-            "enable_id_trans": 0,
-            "enable_duplicate_check": 0,
-            "duplicate_check_interval": 1800
-        }
-        update_values(extra_params, kwargs)
-
         body = {
             "touser": '|'.join(users),
             "msgtype": "text",
@@ -123,7 +116,7 @@
             "text": {
                 "content": msg
             },
-            **extra_params
+            **kwargs
         }
         if markdown:
             body['msgtype'] = 'markdown'
@@ -144,15 +137,15 @@
         if 'invaliduser' not in data:
             return ()
 
-        invaliduser = data['invaliduser']
-        if not invaliduser:
+        invalid_user = data['invaliduser']
+        if not invalid_user:
             return ()
 
-        if isinstance(invaliduser, str):
-            logger.error(f'WeCom send text 200, but invaliduser is not str: invaliduser={invaliduser}')
+        if isinstance(invalid_user, str):
+            logger.error(f'WeCom send text 200, but invaliduser is not str: invaliduser={invalid_user}')
             raise WeComError
 
-        invalid_users = invaliduser.split('|')
+        invalid_users = invalid_user.split('|')
         return invalid_users
 
     def get_user_id_by_code(self, code):
@@ -167,13 +160,12 @@ class WeCom(RequestMixin):
 
         self._requests.check_errcode_is_0(data)
 
-        USER_ID = 'UserId'
-        OPEN_ID = 'OpenId'
-
-        if USER_ID in data:
-            return data[USER_ID], USER_ID
-        elif OPEN_ID in data:
-            return data[OPEN_ID], OPEN_ID
+        user_id = 'UserId'
+        open_id = 'OpenId'
+        if user_id in data:
+            return data[user_id], user_id
+        elif open_id in data:
+            return data[open_id], open_id
         else:
             logger.error(f'WeCom response 200 but get field from json error: fields=UserId|OpenId')
             raise WeComError
@@ -195,3 +187,37 @@ class WeCom(RequestMixin):
         default_detail = self.default_user_detail(data, user_id)
         detail = map_attributes(default_detail, info, self.attributes)
         return detail
+
+
+class WeComTool(object):
+    WECOM_STATE_SESSION_KEY = '_wecom_state'
+    WECOM_STATE_VALUE = 'wecom'
+
+    @lazyproperty
+    def qr_cb_url(self):
+        return reverse('authentication:wecom-qr-login-callback', external=True)
+
+    def gen_state(self, request=None):
+        state = random_string(16)
+        if not request:
+            cache.set(state, self.WECOM_STATE_VALUE, timeout=60 * 60 * 24)
+        else:
+            request.session[self.WECOM_STATE_SESSION_KEY] = state
+        return state
+
+    def check_state(self, state, request=None):
+        return cache.get(state) == self.WECOM_STATE_VALUE or \
+            request.session[self.WECOM_STATE_SESSION_KEY] == state
+
+    def wrap_redirect_url(self, next_url):
+        params = {
+            'appid': settings.WECOM_CORPID,
+            'agentid': settings.WECOM_AGENTID,
+            'state': self.gen_state(),
+            'redirect_uri': f'{self.qr_cb_url}?next={next_url}',
+            'response_type': 'code', 'scope': 'snsapi_base',
+        }
+        return URL.OAUTH_CONNECT + '?' + urlencode(params) + '#wechat_redirect'
+
+
+wecom_tool = WeComTool()
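The new WeComTool keeps the OAuth `state` either in the user's session (interactive logins) or in Django's cache (links generated outside a request, such as notification deep links), and `check_state` accepts either location. Below is a minimal standalone sketch of that round-trip, with plain dicts standing in for Django's cache and session; every name in it is illustrative, not a project API.

import secrets

# Stand-ins for Django's cache and request.session in this sketch.
fake_cache = {}
fake_session = {}

STATE_SESSION_KEY = '_wecom_state'
STATE_VALUE = 'wecom'


def gen_state(session=None):
    # Random, single-use token tying the callback back to this login attempt.
    state = secrets.token_hex(8)
    if session is None:
        fake_cache[state] = STATE_VALUE        # cache-backed state (no request available)
    else:
        session[STATE_SESSION_KEY] = state     # session-backed state
    return state


def check_state(state, session=None):
    # Accept either storage location, mirroring WeComTool.check_state.
    if fake_cache.get(state) == STATE_VALUE:
        return True
    return session is not None and session.get(STATE_SESSION_KEY) == state


s = gen_state(fake_session)
assert check_state(s, fake_session)
assert not check_state('tampered', fake_session)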
@@ -1,9 +1,9 @@
 import os
 
-import jms_storage
 from django.conf import settings
 from django.core.files.storage import default_storage
 
+from common.storage import jms_storage
 from common.utils import get_logger, make_dirs
 from terminal.models import ReplayStorage
 
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# coding: utf-8
+# Copyright (c) 2018
+#
+
+__version__ = '0.0.59'
+
+from .ftp import FTPStorage
+from .oss import OSSStorage
+from .obs import OBSStorage
+from .s3 import S3Storage
+from .azure import AzureStorage
+from .ceph import CEPHStorage
+from .jms import JMSReplayStorage, JMSCommandStorage
+from .multi import MultiObjectStorage
+from .sftp import SFTPStorage
+
+
+def get_object_storage(config):
+    if config.get("TYPE") in ["s3", "ceph", "swift", "cos"]:
+        return S3Storage(config)
+    elif config.get("TYPE") == "oss":
+        return OSSStorage(config)
+    elif config.get("TYPE") == "server":
+        return JMSReplayStorage(config)
+    elif config.get("TYPE") == "azure":
+        return AzureStorage(config)
+    elif config.get("TYPE") == "ceph":
+        return CEPHStorage(config)
+    elif config.get("TYPE") == "ftp":
+        return FTPStorage(config)
+    elif config.get("TYPE") == "obs":
+        return OBSStorage(config)
+    elif config.get("TYPE") == "sftp":
+        return SFTPStorage(config)
+    else:
+        return JMSReplayStorage(config)
+
+
+def get_multi_object_storage(configs):
+    return MultiObjectStorage(configs)
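This vendored package replaces the former external jms_storage dependency; `get_object_storage` dispatches on the `TYPE` key of a storage config dict. A hedged usage sketch follows, assuming the JumpServer code base is importable; the config values and the DummyService stub are placeholders, chosen because the 'server' backend only holds a client object and never connects anywhere.

from common.storage import jms_storage


class DummyService:
    """Stub standing in for the app-service client used by the 'server' type."""

    def push_session_replay(self, src, session_id):
        print(f'would push {src} as replay for session {session_id}')
        return True


storage = jms_storage.get_object_storage({'TYPE': 'server', 'SERVICE': DummyService()})
ok, err = storage.upload('/tmp/demo.replay.gz', 'replays/2024/10/22/demo.replay.gz')
print(ok, err)  # every backend returns an (ok, error) pair; (True, None) on success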
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+#
+
+import os
+
+from azure.storage.blob import BlobServiceClient
+
+from .base import ObjectStorage
+
+
+class AzureStorage(ObjectStorage):
+
+    def __init__(self, config):
+        self.account_name = config.get("ACCOUNT_NAME", None)
+        self.account_key = config.get("ACCOUNT_KEY", None)
+        self.container_name = config.get("CONTAINER_NAME", None)
+        self.endpoint_suffix = config.get("ENDPOINT_SUFFIX", 'core.chinacloudapi.cn')
+
+        if self.account_name and self.account_key:
+            self.service_client = BlobServiceClient(
+                account_url=f'https://{self.account_name}.blob.{self.endpoint_suffix}',
+                credential={'account_name': self.account_name, 'account_key': self.account_key}
+            )
+            self.client = self.service_client.get_container_client(self.container_name)
+        else:
+            self.client = None
+
+    def upload(self, src, target):
+        try:
+            self.client.upload_blob(target, src)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def download(self, src, target):
+        try:
+            blob_data = self.client.download_blob(blob=src)
+            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
+            with open(target, 'wb') as writer:
+                writer.write(blob_data.readall())
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def delete(self, path):
+        try:
+            self.client.delete_blob(path)
+            return True, False
+        except Exception as e:
+            return False, e
+
+    def exists(self, path):
+        resp = self.client.list_blobs(name_starts_with=path)
+        return len(list(resp)) != 0
+
+    def list_buckets(self):
+        return list(self.service_client.list_containers())
+
+    @property
+    def type(self):
+        return 'azure'
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+#
+
+import abc
+
+
+class ObjectStorage(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def upload(self, src, target):
+        return None, None
+
+    @abc.abstractmethod
+    def download(self, src, target):
+        pass
+
+    @abc.abstractmethod
+    def delete(self, path):
+        pass
+
+    @abc.abstractmethod
+    def exists(self, path):
+        pass
+
+    def is_valid(self, src, target):
+        ok, msg = self.upload(src=src, target=target)
+        if not ok:
+            return False
+        self.delete(path=target)
+        return True
+
+
+class LogStorage(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def save(self, command):
+        pass
+
+    @abc.abstractmethod
+    def bulk_save(self, command_set, raise_on_error=True):
+        pass
+
+    @abc.abstractmethod
+    def filter(self, date_from=None, date_to=None,
+               user=None, asset=None, account=None,
+               input=None, session=None):
+        pass
+
+    @abc.abstractmethod
+    def count(self, date_from=None, date_to=None,
+              user=None, asset=None, account=None,
+              input=None, session=None):
+        pass
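ObjectStorage defines the contract every backend follows: upload/download/delete return an (ok, error) pair, exists returns a bool, and is_valid probes a backend with a throwaway upload. As a sketch only, here is what an additional backend could look like, a hypothetical local-disk backend that is not part of this commit, written against the same ABC and intended to live next to the other modules in this package.

# Hypothetical local-disk backend, sketched against the ObjectStorage ABC above.
import os
import shutil

from .base import ObjectStorage


class LocalStorage(ObjectStorage):
    def __init__(self, config):
        self.root = config.get("ROOT", "/tmp/replays")

    def upload(self, src, target):
        try:
            dst = os.path.join(self.root, target)
            os.makedirs(os.path.dirname(dst), 0o755, exist_ok=True)
            shutil.copyfile(src, dst)
            return True, None  # same (ok, error) convention as the real backends
        except Exception as e:
            return False, e

    def download(self, src, target):
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            shutil.copyfile(os.path.join(self.root, src), target)
            return True, None
        except Exception as e:
            return False, e

    def delete(self, path):
        try:
            os.remove(os.path.join(self.root, path))
            return True, None
        except Exception as e:
            return False, e

    def exists(self, path):
        return os.path.exists(os.path.join(self.root, path))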
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+#
+
+import os
+import boto
+import boto.s3.connection
+
+from .base import ObjectStorage
+
+
+class CEPHStorage(ObjectStorage):
+
+    def __init__(self, config):
+        self.bucket = config.get("BUCKET", None)
+        self.region = config.get("REGION", None)
+        self.access_key = config.get("ACCESS_KEY", None)
+        self.secret_key = config.get("SECRET_KEY", None)
+        self.hostname = config.get("HOSTNAME", None)
+        self.port = config.get("PORT", 7480)
+
+        if self.hostname and self.access_key and self.secret_key:
+            self.conn = boto.connect_s3(
+                aws_access_key_id=self.access_key,
+                aws_secret_access_key=self.secret_key,
+                host=self.hostname,
+                port=self.port,
+                is_secure=False,
+                calling_format=boto.s3.connection.OrdinaryCallingFormat(),
+            )
+
+        try:
+            self.client = self.conn.get_bucket(bucket_name=self.bucket)
+        except Exception:
+            self.client = None
+
+    def upload(self, src, target):
+        try:
+            key = self.client.new_key(target)
+            key.set_contents_from_filename(src)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def download(self, src, target):
+        try:
+            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
+            key = self.client.get_key(src)
+            key.get_contents_to_filename(target)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def delete(self, path):
+        try:
+            self.client.delete_key(path)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def exists(self, path):
+        try:
+            return self.client.get_key(path)
+        except Exception:
+            return False
+
+    @property
+    def type(self):
+        return 'ceph'
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+#
+import os
+from ftplib import FTP, error_perm
+from .base import ObjectStorage
+
+
+class FTPStorage(ObjectStorage):
+
+    def __init__(self, config):
+        self.host = config.get("HOST", None)
+        self.port = int(config.get("PORT", 21))
+        self.username = config.get("USERNAME", None)
+        self.password = config.get("PASSWORD", None)
+        self.pasv = bool(config.get("PASV", False))
+        self.dir = config.get("DIR", "replay")
+        self.client = FTP()
+        self.client.encoding = 'utf-8'
+        self.client.set_pasv(self.pasv)
+        self.pwd = '.'
+        self.connect()
+
+    def connect(self, timeout=-999, source_address=None):
+        self.client.connect(self.host, self.port, timeout, source_address)
+        self.client.login(self.username, self.password)
+        if not self.check_dir_exist(self.dir):
+            self.mkdir(self.dir)
+        self.client.cwd(self.dir)
+        self.pwd = self.client.pwd()
+
+    def confirm_connected(self):
+        try:
+            self.client.pwd()
+        except Exception:
+            self.connect()
+
+    def upload(self, src, target):
+        self.confirm_connected()
+        target_dir = os.path.dirname(target)
+        exist = self.check_dir_exist(target_dir)
+        if not exist:
+            ok = self.mkdir(target_dir)
+            if not ok:
+                raise PermissionError('Dir create error: %s' % target)
+        try:
+            with open(src, 'rb') as f:
+                self.client.storbinary('STOR '+target, f)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def download(self, src, target):
+        self.confirm_connected()
+        try:
+            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
+            with open(target, 'wb') as f:
+                self.client.retrbinary('RETR ' + src, f.write)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def delete(self, path):
+        self.confirm_connected()
+        if not self.exists(path):
+            raise FileNotFoundError('File not exist error(%s)' % path)
+        try:
+            self.client.delete(path)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def check_dir_exist(self, d):
+        pwd = self.client.pwd()
+        try:
+            self.client.cwd(d)
+            self.client.cwd(pwd)
+            return True
+        except error_perm:
+            return False
+
+    def mkdir(self, dirs):
+        self.confirm_connected()
+        # Create nested directories; ftplib cannot create a multi-level path in one call
+        dir_list = dirs.split('/')
+        pwd = self.client.pwd()
+        try:
+            for d in dir_list:
+                if not d or d in ['.']:
+                    continue
+                # Try to change into the directory first
+                try:
+                    self.client.cwd(d)
+                    continue
+                except:
+                    pass
+                # If that fails, create the directory, then change into it
+                try:
+                    self.client.mkd(d)
+                    self.client.cwd(d)
+                except:
+                    return False
+            return True
+        finally:
+            self.client.cwd(pwd)
+
+    def exists(self, target):
+        self.confirm_connected()
+        try:
+            self.client.size(target)
+            return True
+        except:
+            return False
+
+    def close(self):
+        self.client.close()
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+#
+import os
+from .base import ObjectStorage, LogStorage
+
+
+class JMSReplayStorage(ObjectStorage):
+    def __init__(self, config):
+        self.client = config.get("SERVICE")
+
+    def upload(self, src, target):
+        session_id = os.path.basename(target).split('.')[0]
+        ok = self.client.push_session_replay(src, session_id)
+        return ok, None
+
+    def delete(self, path):
+        return False, Exception("Not support not")
+
+    def exists(self, path):
+        return False
+
+    def download(self, src, target):
+        return False, Exception("Not support not")
+
+    @property
+    def type(self):
+        return 'jms'
+
+
+class JMSCommandStorage(LogStorage):
+    def __init__(self, config):
+        self.client = config.get("SERVICE")
+        if not self.client:
+            raise Exception("Not found app service")
+
+    def save(self, command):
+        return self.client.push_session_command([command])
+
+    def bulk_save(self, command_set, raise_on_error=True):
+        return self.client.push_session_command(command_set)
+
+    def filter(self, date_from=None, date_to=None,
+               user=None, asset=None, account=None,
+               input=None, session=None):
+        pass
+
+    def count(self, date_from=None, date_to=None,
+              user=None, asset=None, account=None,
+              input=None, session=None):
+        pass
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+#
+
+from .base import ObjectStorage, LogStorage
+
+
+class MultiObjectStorage(ObjectStorage):
+
+    def __init__(self, configs):
+        self.configs = configs
+        self.storage_list = []
+        self.init_storage_list()
+
+    def init_storage_list(self):
+        from . import get_object_storage
+        if isinstance(self.configs, dict):
+            configs = self.configs.values()
+        else:
+            configs = self.configs
+
+        for config in configs:
+            try:
+                storage = get_object_storage(config)
+                self.storage_list.append(storage)
+            except Exception:
+                pass
+
+    def upload(self, src, target):
+        success = []
+        msg = []
+
+        for storage in self.storage_list:
+            ok, err = storage.upload(src, target)
+            success.append(ok)
+            msg.append(err)
+
+        return success, msg
+
+    def download(self, src, target):
+        success = False
+        msg = None
+
+        for storage in self.storage_list:
+            try:
+                if not storage.exists(src):
+                    continue
+                ok, msg = storage.download(src, target)
+                if ok:
+                    success = True
+                    msg = ''
+                    break
+            except:
+                pass
+        return success, msg
+
+    def delete(self, path):
+        success = True
+        msg = None
+
+        for storage in self.storage_list:
+            try:
+                if storage.exists(path):
+                    ok, msg = storage.delete(path)
+                    if not ok:
+                        success = False
+            except:
+                pass
+        return success, msg
+
+    def exists(self, path):
+        for storage in self.storage_list:
+            try:
+                if storage.exists(path):
+                    return True
+            except:
+                pass
+        return False
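MultiObjectStorage builds one backend per config via the factory and fans writes out to all of them, collecting per-backend results. A small sketch of that behaviour, again assuming the project is importable and using stub 'server' backends so nothing connects to real storage; all values are placeholders.

from common.storage import jms_storage


class DummyService:
    def push_session_replay(self, src, session_id):
        return True


configs = [
    {'TYPE': 'server', 'SERVICE': DummyService()},
    {'TYPE': 'server', 'SERVICE': DummyService()},
]
multi = jms_storage.get_multi_object_storage(configs)

# upload() returns one result per configured backend.
oks, errors = multi.upload('/tmp/demo.replay.gz', 'replays/demo.replay.gz')
print(oks)     # e.g. [True, True]
print(errors)  # e.g. [None, None]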
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+#
+
+import os
+
+from obs.client import ObsClient
+from .base import ObjectStorage
+
+
+class OBSStorage(ObjectStorage):
+    def __init__(self, config):
+        self.endpoint = config.get("ENDPOINT", None)
+        self.bucket = config.get("BUCKET", None)
+        self.access_key = config.get("ACCESS_KEY", None)
+        self.secret_key = config.get("SECRET_KEY", None)
+        if self.access_key and self.secret_key and self.endpoint:
+            proxy_host = os.getenv("proxy_host")
+            proxy_port = os.getenv("proxy_port")
+            proxy_username = os.getenv("proxy_username")
+            proxy_password = os.getenv("proxy_password")
+            self.obsClient = ObsClient(access_key_id=self.access_key, secret_access_key=self.secret_key, server=self.endpoint, proxy_host=proxy_host, proxy_port=proxy_port, proxy_username=proxy_username, proxy_password=proxy_password)
+        else:
+            self.obsClient = None
+
+    def upload(self, src, target):
+        try:
+            resp = self.obsClient.putFile(self.bucket, target, src)
+            if resp.status < 300:
+                return True, None
+            else:
+                return False, resp.reason
+        except Exception as e:
+            return False, e
+
+    def exists(self, path):
+        resp = self.obsClient.getObjectMetadata(self.bucket, path)
+        if resp.status < 300:
+            return True
+        return False
+
+    def delete(self, path):
+        try:
+            resp = self.obsClient.deleteObject(self.bucket, path)
+            if resp.status < 300:
+                return True, None
+            else:
+                return False, resp.reason
+        except Exception as e:
+            return False, e
+
+    def download(self, src, target):
+        try:
+            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
+            resp = self.obsClient.getObject(self.bucket, src, target)
+            if resp.status < 300:
+                return True, None
+            else:
+                return False, resp.reason
+        except Exception as e:
+            return False, e
+
+    def list_buckets(self):
+        resp = self.obsClient.listBuckets()
+        if resp.status < 300:
+            return [b.name for b in resp.body.buckets]
+        else:
+            raise RuntimeError(resp.status, str(resp.reason))
+
+    @property
+    def type(self):
+        return 'obs'
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+#
+import os
+import time
+
+import oss2
+
+from .base import ObjectStorage
+
+
+class OSSStorage(ObjectStorage):
+    def __init__(self, config):
+        self.endpoint = config.get("ENDPOINT", None)
+        self.bucket = config.get("BUCKET", None)
+        self.access_key = config.get("ACCESS_KEY", None)
+        self.secret_key = config.get("SECRET_KEY", None)
+        if self.access_key and self.secret_key:
+            self.auth = oss2.Auth(self.access_key, self.secret_key)
+        else:
+            self.auth = None
+        if self.auth and self.endpoint and self.bucket:
+            self.client = oss2.Bucket(self.auth, self.endpoint, self.bucket)
+        else:
+            self.client = None
+
+    def upload(self, src, target):
+        try:
+            self.client.put_object_from_file(target, src)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def exists(self, path):
+        try:
+            return self.client.object_exists(path)
+        except Exception as e:
+            return False
+
+    def delete(self, path):
+        try:
+            self.client.delete_object(path)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def restore(self, path):
+        meta = self.client.head_object(path)
+        if meta.resp.headers['x-oss-storage-class'] == oss2.BUCKET_STORAGE_CLASS_ARCHIVE:
+            self.client.restore_object(path)
+            while True:
+                meta = self.client.head_object(path)
+                if meta.resp.headers['x-oss-restore'] == 'ongoing-request="true"':
+                    time.sleep(5)
+                else:
+                    break
+
+    def download(self, src, target):
+        try:
+            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
+            self.restore(src)
+            self.client.get_object_to_file(src, target)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def list_buckets(self):
+        service = oss2.Service(self.auth, self.endpoint)
+        return ([b.name for b in oss2.BucketIterator(service)])
+
+    @property
+    def type(self):
+        return 'oss'
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+#
+import boto3
+import os
+
+from .base import ObjectStorage
+
+
+class S3Storage(ObjectStorage):
+    def __init__(self, config):
+        self.bucket = config.get("BUCKET", "jumpserver")
+        self.region = config.get("REGION", None)
+        self.access_key = config.get("ACCESS_KEY", None)
+        self.secret_key = config.get("SECRET_KEY", None)
+        self.endpoint = config.get("ENDPOINT", None)
+
+        try:
+            self.client = boto3.client(
+                's3', region_name=self.region,
+                aws_access_key_id=self.access_key,
+                aws_secret_access_key=self.secret_key,
+                endpoint_url=self.endpoint
+            )
+        except ValueError:
+            pass
+
+    def upload(self, src, target):
+        try:
+            self.client.upload_file(Filename=src, Bucket=self.bucket, Key=target)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def exists(self, path):
+        try:
+            self.client.head_object(Bucket=self.bucket, Key=path)
+            return True
+        except Exception as e:
+            return False
+
+    def download(self, src, target):
+        try:
+            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
+            self.client.download_file(self.bucket, src, target)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def delete(self, path):
+        try:
+            self.client.delete_object(Bucket=self.bucket, Key=path)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def generate_presigned_url(self, path, expire=3600):
+        try:
+            return self.client.generate_presigned_url(
+                ClientMethod='get_object',
+                Params={'Bucket': self.bucket, 'Key': path},
+                ExpiresIn=expire,
+                HttpMethod='GET'), None
+        except Exception as e:
+            return False, e
+
+    def list_buckets(self):
+        response = self.client.list_buckets()
+        buckets = response.get('Buckets', [])
+        result = [b['Name'] for b in buckets if b.get('Name')]
+        return result
+
+    @property
+    def type(self):
+        return 's3'
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+import os
+import io
+import paramiko
+from jms_storage.base import ObjectStorage
+
+
+class SFTPStorage(ObjectStorage):
+
+    def __init__(self, config):
+        self.sftp = None
+        self.sftp_host = config.get('SFTP_HOST', None)
+        self.sftp_port = int(config.get('SFTP_PORT', 22))
+        self.sftp_username = config.get('SFTP_USERNAME', '')
+        self.sftp_secret_type = config.get('STP_SECRET_TYPE', 'password')
+        self.sftp_password = config.get('SFTP_PASSWORD', '')
+        self.sftp_private_key = config.get('STP_PRIVATE_KEY', '')
+        self.sftp_passphrase = config.get('STP_PASSPHRASE', '')
+        self.sftp_root_path = config.get('SFTP_ROOT_PATH', '/tmp')
+        self.ssh = paramiko.SSHClient()
+        self.connect()
+
+    def connect(self):
+        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        if self.sftp_secret_type == 'password':
+            self.ssh.connect(self.sftp_host, self.sftp_port, self.sftp_username, self.sftp_password)
+        elif self.sftp_secret_type == 'ssh_key':
+            pkey = paramiko.RSAKey.from_private_key(io.StringIO(self.sftp_private_key))
+            self.ssh.connect(self.sftp_host, self.sftp_port, self.sftp_username, pkey=pkey,
+                             passphrase=self.sftp_passphrase)
+        self.sftp = self.ssh.open_sftp()
+
+    def confirm_connected(self):
+        try:
+            self.sftp.getcwd()
+        except Exception as e:
+            self.connect()
+
+    def upload(self, src, target):
+        local_file = src
+        remote_file = os.path.join(self.sftp_root_path, target)
+        try:
+            self.confirm_connected()
+            mode = os.stat(local_file).st_mode
+            remote_dir = os.path.dirname(remote_file)
+            if not self.exists(remote_dir):
+                self.sftp.mkdir(remote_dir)
+            self.sftp.put(local_file, remote_file)
+            self.sftp.chmod(remote_file, mode)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def download(self, src, target):
+        remote_file = src
+        local_file = target
+        self.confirm_connected()
+        try:
+            local_dir = os.path.dirname(local_file)
+            if not os.path.exists(local_dir):
+                os.makedirs(local_dir)
+            mode = self.sftp.stat(remote_file).st_mode
+            self.sftp.get(remote_file, local_file)
+            os.chmod(local_file, mode)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def delete(self, path):
+        path = os.path.join(self.sftp_root_path, path)
+        self.confirm_connected()
+        if not self.exists(path):
+            raise FileNotFoundError('File not exist error(%s)' % path)
+        try:
+            self.sftp.remove(path)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def check_dir_exist(self, d):
+        self.confirm_connected()
+        try:
+            self.sftp.stat(d)
+            return True
+        except Exception:
+            return False
+
+    def mkdir(self, dirs):
+        self.confirm_connected()
+        try:
+            if not self.exists(dirs):
+                self.sftp.mkdir(dirs)
+            return True, None
+        except Exception as e:
+            return False, e
+
+    def exists(self, target):
+        self.confirm_connected()
+        try:
+            self.sftp.stat(target)
+            return True
+        except:
+            return False
+
+    def close(self):
+        self.sftp.close()
+        self.ssh.close()
@@ -1,11 +1,11 @@
 import os
 
-import jms_storage
 from celery import shared_task
 from django.conf import settings
 from django.core.mail import send_mail, EmailMultiAlternatives, get_connection
 from django.utils.translation import gettext_lazy as _
 
+from common.storage import jms_storage
 from .utils import get_logger
 
 logger = get_logger(__file__)
@@ -127,13 +127,16 @@ class Message(metaclass=MessageType):
     def get_html_msg(self) -> dict:
         return self.get_common_msg()
 
-    def get_markdown_msg(self) -> dict:
+    @staticmethod
+    def html_to_markdown(html_msg):
         h = HTML2Text()
-        h.body_width = 300
-        msg = self.get_html_msg()
-        content = msg['message']
-        msg['message'] = h.handle(content)
-        return msg
+        h.body_width = 0
+        content = html_msg['message']
+        html_msg['message'] = h.handle(content)
+        return html_msg
+
+    def get_markdown_msg(self) -> dict:
+        return self.html_to_markdown(self.get_html_msg())
 
     def get_text_msg(self) -> dict:
         h = HTML2Text()
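The markdown conversion is now a reusable html_to_markdown static method, so channel-specific builders (such as the new WeCom messages further down) can convert any prepared HTML payload; body_width = 0 tells html2text not to hard-wrap long lines. A standalone sketch of the same conversion, assuming the html2text package is installed and using a made-up message dict:

from html2text import HTML2Text


def html_to_markdown(html_msg: dict) -> dict:
    h = HTML2Text()
    h.body_width = 0  # 0 disables hard wrapping, so long rows and URLs stay on one line
    html_msg['message'] = h.handle(html_msg['message'])
    return html_msg


msg = {'subject': 'Danger command alert',
       'message': '<b>rm -rf /</b> executed on <a href="https://js.example.org">asset</a>'}
print(html_to_markdown(msg)['message'])
# -> **rm -rf /** executed on [asset](https://js.example.org)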
@@ -9,4 +9,11 @@
   ansible.windows.win_powershell:
     script: |
       tinkerd install --name {{ applet_name }}
+      $exitCode = $LASTEXITCODE
+      if ($exitCode -ne 0) {
+        Write-Host "Failed to install {{ applet_name }}"
+        Write-Host "Exit code: $exitCode"
+        $Ansible.Failed = $true
+        exit 1
+      }
   when: applet_name != 'all'
@@ -18,7 +18,7 @@
     PYTHON_VERSION: 3.11.10
     CHROME_VERSION: 118.0.5993.118
     CHROME_DRIVER_VERSION: 118.0.5993.70
-    TINKER_VERSION: v0.1.9
+    TINKER_VERSION: v0.2.0
 
   tasks:
     - block:
@@ -268,6 +268,13 @@
       ansible.windows.win_powershell:
         script: |
           tinkerd install all
+          $exitCode = $LASTEXITCODE
+          if ($exitCode -ne 0) {
+            Write-Host "Failed to install applets"
+            Write-Host "Exit code: $exitCode"
+            $Ansible.Failed = $true
+            exit 1
+          }
       register: sync_remote_applets
       when: INSTALL_APPLETS
@@ -9,3 +9,10 @@
   ansible.windows.win_powershell:
     script: |
       tinkerd uninstall --name {{ applet_name }}
+      $exitCode = $LASTEXITCODE
+      if ($exitCode -ne 0) {
+        Write-Host "Failed to uninstall {{ applet_name }}"
+        Write-Host "Exit code: $exitCode"
+        $Ansible.Failed = $true
+        exit 1
+      }
@@ -4,7 +4,6 @@ import copy
 import os
 from importlib import import_module
 
-import jms_storage
 from django.conf import settings
 from django.db import models
 from django.utils.translation import gettext_lazy as _
@@ -12,6 +11,7 @@ from django.utils.translation import gettext_lazy as _
 from common.db.fields import EncryptJsonDictTextField
 from common.db.models import JMSBaseModel
 from common.plugins.es import QuerySet as ESQuerySet
+from common.storage import jms_storage
 from common.utils import get_logger
 from common.utils.timezone import local_now_date_display
 from terminal import const
@@ -4,6 +4,7 @@ from django.conf import settings
 from django.template.loader import render_to_string
 from django.utils.translation import gettext_lazy as _
 
+from common.sdk.im.wecom import wecom_tool
 from common.utils import get_logger, reverse
 from common.utils import lazyproperty
 from common.utils.timezone import local_now_display
@@ -75,53 +76,50 @@ class CommandWarningMessage(CommandAlertMixin, UserMessage):
         super().__init__(user)
         self.command = command
 
-    def get_html_msg(self) -> dict:
-        command = self.command
-        command_input = command['input']
-        user = command['user']
-        asset = command['asset']
-        account = command.get('_account', '')
-        cmd_acl = command.get('_cmd_filter_acl')
-        cmd_group = command.get('_cmd_group')
-        session_id = command.get('session', '')
-        risk_level = command['risk_level']
-        org_id = command['org_id']
-        org_name = command.get('_org_name') or org_id
-
+    def get_session_url(self, external=True):
+        session_id = self.command.get('session', '')
+        org_id = self.command['org_id']
+        session_url = ''
         if session_id:
             session_url = reverse(
                 'api-terminal:session-detail', kwargs={'pk': session_id},
-                external=True, api_to_ui=True
+                external=external, api_to_ui=True
             ) + '?oid={}'.format(org_id)
             session_url = session_url.replace('/terminal/sessions/', '/audit/sessions/sessions/')
-        else:
-            session_url = ''
-
-        # Command ACL
-        cmd_acl_name = cmd_group_name = ''
-        if cmd_acl:
-            cmd_acl_name = cmd_acl.name
-        if cmd_group:
-            cmd_group_name = cmd_group.name
+        return session_url
+
+    def gen_html_string(self, **other_context):
+        command = self.command
+        cmd_acl = command.get('_cmd_filter_acl')
+        cmd_group = command.get('_cmd_group')
+        org_id = command['org_id']
+        org_name = command.get('_org_name') or org_id
+        cmd_acl_name = cmd_acl.name if cmd_acl else ''
+        cmd_group_name = cmd_group.name if cmd_group else ''
 
         context = {
-            'command': command_input,
-            'user': user,
-            'asset': asset,
-            'account': account,
+            'command': command['input'],
+            'user': command['user'],
+            'asset': command['asset'],
+            'account': command.get('_account', ''),
             'cmd_filter_acl': cmd_acl_name,
             'cmd_group': cmd_group_name,
-            'session_url': session_url,
-            'risk_level': RiskLevelChoices.get_label(risk_level),
+            'risk_level': RiskLevelChoices.get_label(command['risk_level']),
             'org': org_name,
         }
+        context.update(other_context)
         message = render_to_string('terminal/_msg_command_warning.html', context)
-        return {
-            'subject': self.subject,
-            'message': message
-        }
+        return {'subject': self.subject, 'message': message}
+
+    def get_wecom_msg(self):
+        session_url = wecom_tool.wrap_redirect_url(
+            self.get_session_url(external=False)
+        )
+        message = self.gen_html_string(session_url=session_url)
+        return self.html_to_markdown(message)
+
+    def get_html_msg(self) -> dict:
+        return self.gen_html_string(session_url=self.get_session_url())
 
 
 class CommandAlertMessage(CommandAlertMixin, SystemMessage):
@@ -141,15 +139,18 @@ class CommandAlertMessage(CommandAlertMixin, SystemMessage):
         command['session'] = Session.objects.first().id
         return cls(command)
 
-    def get_html_msg(self) -> dict:
-        command = self.command
+    def get_session_url(self, external=True):
         session_detail_url = reverse(
-            'api-terminal:session-detail', kwargs={'pk': command['session']},
-            external=True, api_to_ui=True
+            'api-terminal:session-detail', api_to_ui=True,
+            kwargs={'pk': self.command['session']}, external=external,
         ) + '?oid={}'.format(self.command['org_id'])
         session_detail_url = session_detail_url.replace(
             '/terminal/sessions/', '/audit/sessions/sessions/'
         )
+        return session_detail_url
+
+    def gen_html_string(self, **other_context) -> dict:
+        command = self.command
         level = RiskLevelChoices.get_label(command['risk_level'])
         items = {
             _("Asset"): command['asset'],
@@ -159,14 +160,21 @@ class CommandAlertMessage(CommandAlertMixin, SystemMessage):
         }
         context = {
             'items': items,
-            'session_url': session_detail_url,
             "command": command['input'],
         }
+        context.update(other_context)
         message = render_to_string('terminal/_msg_command_alert.html', context)
-        return {
-            'subject': self.subject,
-            'message': message
-        }
+        return {'subject': self.subject, 'message': message}
+
+    def get_wecom_msg(self):
+        session_url = wecom_tool.wrap_redirect_url(
+            self.get_session_url(external=False)
+        )
+        message = self.gen_html_string(session_url=session_url)
+        return self.html_to_markdown(message)
+
+    def get_html_msg(self) -> dict:
+        return self.gen_html_string(session_url=self.get_session_url())
 
 
 class CommandExecutionAlert(CommandAlertMixin, SystemMessage):
@@ -189,16 +197,20 @@ class CommandExecutionAlert(CommandAlertMixin, SystemMessage):
         }
         return cls(cmd)
 
-    def get_html_msg(self) -> dict:
-        command = self.command
+    def get_asset_urls(self, external=True, tran_func=None):
         assets_with_url = []
-        for asset in command['assets']:
+        for asset in self.command['assets']:
             url = reverse(
                 'assets:asset-detail', kwargs={'pk': asset.id},
-                api_to_ui=True, external=True, is_console=True
+                api_to_ui=True, external=external, is_console=True
             ) + '?oid={}'.format(asset.org_id)
+            if tran_func:
+                url = tran_func(url)
             assets_with_url.append([asset, url])
+        return assets_with_url
+
+    def gen_html_string(self, **other_context):
+        command = self.command
         level = RiskLevelChoices.get_label(command['risk_level'])
 
         items = {
@ -206,17 +218,23 @@ class CommandExecutionAlert(CommandAlertMixin, SystemMessage):
|
||||||
_("Level"): level,
|
_("Level"): level,
|
||||||
_("Date"): local_now_display(),
|
_("Date"): local_now_display(),
|
||||||
}
|
}
|
||||||
|
|
||||||
context = {
|
context = {
|
||||||
'items': items,
|
'items': items,
|
||||||
'assets_with_url': assets_with_url,
|
|
||||||
'command': command['input'],
|
'command': command['input'],
|
||||||
}
|
}
|
||||||
|
context.update(other_context)
|
||||||
message = render_to_string('terminal/_msg_command_execute_alert.html', context)
|
message = render_to_string('terminal/_msg_command_execute_alert.html', context)
|
||||||
return {
|
return {'subject': self.subject, 'message': message}
|
||||||
'subject': self.subject,
|
|
||||||
'message': message
|
def get_wecom_msg(self):
|
||||||
}
|
assets_with_url = self.get_asset_urls(
|
||||||
|
external=False, tran_func=wecom_tool.wrap_redirect_url
|
||||||
|
)
|
||||||
|
message = self.gen_html_string(assets_with_url=assets_with_url)
|
||||||
|
return self.html_to_markdown(message)
|
||||||
|
|
||||||
|
def get_html_msg(self) -> dict:
|
||||||
|
return self.gen_html_string(assets_with_url=self.get_asset_urls())
|
||||||
|
|
||||||
|
|
||||||
class StorageConnectivityMessage(SystemMessage):
|
class StorageConnectivityMessage(SystemMessage):
|
||||||
|
|
|
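Note on the change above: message rendering is consolidated into one gen_html_string(**other_context) per alert class, while get_html_msg() and get_wecom_msg() only supply channel-specific context (an absolute session/asset URL for HTML mail, a wrapped relative URL flattened to markdown for WeCom). A minimal, framework-free sketch of that pattern follows; the class, the inline render() helper, and all URLs are stand-ins for illustration, not the project's real templates or wecom_tool API.

# Minimal sketch of the pattern above (hypothetical names, no Django):
# one shared gen_html_string() plus thin per-channel wrappers.
class AlertMessage:
    subject = 'Danger command alert'

    def render(self, context):
        # Stand-in for render_to_string(template_name, context).
        return '<p>{command}</p><a href="{session_url}">view session</a>'.format(**context)

    def gen_html_string(self, **other_context):
        context = {'command': 'rm -rf /tmp/*'}
        context.update(other_context)
        return {'subject': self.subject, 'message': self.render(context)}

    def get_html_msg(self):
        # HTML/email channel: an absolute URL goes straight into the template.
        return self.gen_html_string(session_url='https://jms.example.com/sessions/1/')

    def get_wecom_msg(self):
        # IM channel: hand the template a wrapped redirect URL, then flatten
        # the rendered HTML (the real code calls self.html_to_markdown()).
        wrapped = 'https://jms.example.com/wecom?redirect=/sessions/1/'
        html = self.gen_html_string(session_url=wrapped)['message']
        return html.replace('<p>', '').replace('</p>', '\n')


if __name__ == '__main__':
    print(AlertMessage().get_html_msg())
    print(AlertMessage().get_wecom_msg())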
@@ -4,12 +4,12 @@ from urllib.parse import urljoin
 from django.conf import settings
 from django.core.cache import cache
 from django.forms import model_to_dict
-from django.shortcuts import reverse
 from django.template.loader import render_to_string
 from django.utils.translation import gettext_lazy as _

 from common.db.encoder import ModelJSONFieldEncoder
-from common.utils import get_logger, random_string
+from common.sdk.im.wecom import wecom_tool
+from common.utils import get_logger, random_string, reverse
 from notifications.notifications import UserMessage
 from . import const
 from .models import Ticket
@@ -22,16 +22,13 @@ class BaseTicketMessage(UserMessage):
     ticket: Ticket
     content_title: str

-    @property
-    def ticket_detail_url(self):
-        tp = self.ticket.type
-        return urljoin(
-            settings.SITE_URL,
-            const.TICKET_DETAIL_URL.format(
-                id=str(self.ticket.id),
-                type=tp
-            )
+    def get_ticket_detail_url(self, external=True):
+        detail_url = const.TICKET_DETAIL_URL.format(
+            id=str(self.ticket.id), type=self.ticket.type
         )
+        if not external:
+            return detail_url
+        return urljoin(settings.SITE_URL, detail_url)

     @property
     def content_title(self):
@@ -41,17 +38,31 @@ class BaseTicketMessage(UserMessage):
     def subject(self):
         raise NotImplementedError

-    def get_html_msg(self) -> dict:
-        context = dict(
-            title=self.content_title,
-            content=self.content,
-            ticket_detail_url=self.ticket_detail_url
-        )
-        message = render_to_string('tickets/_msg_ticket.html', context)
-        return {
-            'subject': self.subject,
-            'message': message
+    def get_html_context(self):
+        return {'ticket_detail_url': self.get_ticket_detail_url()}
+
+    def get_wecom_context(self):
+        ticket_detail_url = wecom_tool.wrap_redirect_url(
+            [self.get_ticket_detail_url(external=False)]
+        )[0]
+        return {'ticket_detail_url': ticket_detail_url}
+
+    def gen_html_string(self, **other_context):
+        context = {
+            'title': self.content_title, 'content': self.content,
         }
+        context.update(other_context)
+        message = render_to_string(
+            'tickets/_msg_ticket.html', context
+        )
+        return {'subject': self.subject, 'message': message}
+
+    def get_html_msg(self) -> dict:
+        return self.gen_html_string(**self.get_html_context())
+
+    def get_wecom_msg(self):
+        message = self.gen_html_string(**self.get_wecom_context())
+        return self.html_to_markdown(message)

     @classmethod
     def gen_test_msg(cls):
@@ -113,27 +124,21 @@ class TicketAppliedToAssigneeMessage(BaseTicketMessage):
         )
         return title

-    def get_ticket_approval_url(self):
+    def get_ticket_approval_url(self, external=True):
         url = reverse('tickets:direct-approve', kwargs={'token': self.token})
+        if not external:
+            return url
         return urljoin(settings.SITE_URL, url)

-    def get_html_msg(self) -> dict:
-        context = dict(
-            title=self.content_title,
-            content=self.content,
-            ticket_detail_url=self.ticket_detail_url
-        )
-
-        ticket_approval_url = self.get_ticket_approval_url()
-        context.update({'ticket_approval_url': ticket_approval_url})
-        message = render_to_string('tickets/_msg_ticket.html', context)
-        cache.set(self.token, {
-            'ticket_id': self.ticket.id, 'approver_id': self.user.id,
-            'content': self.content,
-        }, 3600)
-        return {
-            'subject': self.subject, 'message': message
+    def get_html_context(self):
+        context = super().get_html_context()
+        context['ticket_approval_url'] = self.get_ticket_approval_url()
+        data = {
+            'ticket_id': self.ticket.id,
+            'approver_id': self.user.id, 'content': self.content,
         }
+        cache.set(self.token, data, 3600)
+        return context

     @classmethod
     def gen_test_msg(cls):
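The external=True/False switch added to get_ticket_detail_url() and get_ticket_approval_url() decides whether callers receive a site-relative path (later wrapped by wecom_tool.wrap_redirect_url) or an absolute URL joined onto SITE_URL. A rough standalone approximation, with placeholder values for the site URL and the detail-path template:

from urllib.parse import urljoin

# Approximation of the external= switch (SITE_URL and the path template are
# placeholders, not the project's settings/const values).
SITE_URL = 'https://jms.example.com'
TICKET_DETAIL_URL = '/ui/#/tickets/tickets/{id}?type={type}'


def get_ticket_detail_url(ticket_id, ticket_type, external=True):
    detail_url = TICKET_DETAIL_URL.format(id=ticket_id, type=ticket_type)
    if not external:
        # Relative path, suitable for a wecom_tool.wrap_redirect_url()-style wrapper.
        return detail_url
    return urljoin(SITE_URL, detail_url)


print(get_ticket_detail_url('42', 'apply_asset'))                  # absolute URL
print(get_ticket_detail_url('42', 'apply_asset', external=False))  # relative path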
File diff suppressed because it is too large
@@ -16,60 +16,32 @@ documentation = "https://docs.jumpserver.org"

 [tool.poetry.dependencies]
 python = "^3.11"
-# cython = "3.0.0"
 aiofiles = "23.1.0"
-amqp = "5.1.1"
 ansible-core = { url = "https://github.com/jumpserver-dev/ansible/archive/refs/tags/v2.14.1.7.zip" }
 ansible = "7.1.0"
 ansible-runner = { url = "https://github.com/jumpserver-dev/ansible-runner/archive/refs/tags/2.4.0.1.zip" }
 asn1crypto = "1.5.1"
-bcrypt = "4.0.1"
-billiard = "4.1.0"
-# certifi = "2023.7.22"
-# cffi = "1.15.1"
-chardet = "5.1.0"
 configparser = "6.0.0"
 decorator = "5.1.1"
-docutils = "0.20.1"
-ecdsa = "0.18.0"
 enum-compat = "0.0.3"
 ephem = "4.1.4"
-future = "0.18.3"
-# idna = "3.4"
-itypes = "1.2.0"
-jinja2 = "3.1.2"
-markupsafe = "2.1.3"
 olefile = "0.46"
-paramiko = "3.2.0"
 passlib = "1.7.4"
-pyasn1 = "0.5.0"
-pycparser = "2.21"
-# cryptography = "41.0.2"
-pycryptodome = "3.18.0"
-pycryptodomex = "3.18.0"
 phonenumbers = "8.13.17"
 gmssl = "3.2.2"
 itsdangerous = "1.1.0"
 pyotp = "2.8.0"
-pynacl = "1.5.0"
-python-dateutil = "2.8.2"
-pyyaml = "6.0.1"
-requests = "2.31.0"
-jms-storage = "^0.0.59"
 simplejson = "3.19.1"
-six = "1.16.0"
 sshtunnel = "0.4.0"
 sshpubkeys = "3.3.1"
-uritemplate = "4.1.1"
-# urllib3 = "1.26.16"
-vine = "5.0.0"
 werkzeug = "2.3.6"
 unicodecsv = "0.14.1"
 httpsig = "1.3.0"
 treelib = "1.6.4"
-psutil = "5.9.5"
+psutil = "6.0.0"
 msrestazure = "0.6.4"
 adal = "1.2.7"
+# Requires fixed version number, ImportError: cannot import name 'save_virtual_workbook' from 'openpyxl.writer.excel'
 openpyxl = "3.0.10"
 pyexcel = "0.7.0"
 pyexcel-xlsx = "0.6.0"
@@ -90,10 +62,7 @@ django = "4.1.13"
 django-bootstrap3 = "23.4"
 django-filter = "23.2"
 django-formtools = "2.4.1"
-django-ranged-response = "0.2.0"
 django-simple-captcha = "0.5.18"
-django-timezone-field = "5.1"
-djangorestframework = "3.14.0"
 djangorestframework-bulk = "0.2.1"
 django-simple-history = "3.3.0"
 django-private-storage = "3.1"
@@ -101,33 +70,29 @@ drf-nested-routers = "0.93.4"
 drf-writable-nested = "0.7.0"
 rest-condition = "1.0.3"
 drf-yasg = "1.21.7"
-coreapi = "2.3.3"
-coreschema = "0.0.4"
 openapi-codec = "1.3.2"
-pillow = "10.0.1"
-pytz = "2023.3"
 django-proxy = "1.2.2"
-python-daemon = "3.0.1"
 eventlet = "0.33.3"
-greenlet = "3.0.1"
 gunicorn = "21.2.0"
-celery = "5.3.1"
 flower = "2.0.1"
 django-celery-beat = "2.6.0"
-kombu = "5.3.1"
 uvicorn = "0.22.0"
 websockets = "11.0.3"
-python-ldap = "3.4.3"
+python-ldap = [
+    { url = "https://github.com/jumpserver-dev/core-package/releases/download/v1.0.0/python_ldap-3.4.3-cp311-cp311-manylinux_2_28_x86_64.whl", markers = "sys_platform == 'linux' and platform_machine == 'x86_64'" },
+    { url = "https://github.com/jumpserver-dev/core-package/releases/download/v1.0.0/python_ldap-3.4.3-cp311-cp311-manylinux_2_28_aarch64.whl", markers = "sys_platform == 'linux' and platform_machine == 'aarch64'" },
+    { version = "3.4.3", source = "aliyun", markers = "sys_platform != 'linux'" }
+]
 ldap3 = "2.9.1"
 django-radius = { url = "https://github.com/ibuler/django-radius/archive/refs/tags/1.5.0.zip" }
 django-cas-ng = { url = "https://github.com/ibuler/django-cas-ng/releases/download/v4.3.2/django-cas-ng-4.3.2.zip" }
-python-cas = "1.6.0"
 django-auth-ldap = "4.4.0"
-boto3 = "1.28.9"
-botocore = "1.31.9"
-s3transfer = "0.6.1"
-mysqlclient = "2.2.4"
-pymssql = "2.2.8"
+mysqlclient = [
+    { url = "https://github.com/jumpserver-dev/core-package/releases/download/v1.0.0/mysqlclient-2.2.4-cp311-cp311-manylinux_2_28_x86_64.whl", markers = "sys_platform == 'linux' and platform_machine == 'x86_64'" },
+    { url = "https://github.com/jumpserver-dev/core-package/releases/download/v1.0.0/mysqlclient-2.2.4-cp311-cp311-manylinux_2_28_aarch64.whl", markers = "sys_platform == 'linux' and platform_machine == 'aarch64'" },
+    { version = "2.2.4", source = "aliyun", markers = "sys_platform != 'linux'" }
+]
+pymssql = "2.2.11"
 django-redis = "5.3.0"
 python-redis-lock = "4.0.0"
 pyopenssl = "23.2.0"
@@ -138,33 +103,29 @@ forgerypy3 = "0.3.1"
 django-debug-toolbar = "4.1.0"
 pympler = "1.0.1"
 hvac = "1.1.1"
-pyhcl = "0.4.4"
 ipy = "1.1"
-netifaces = "^0.11.0"
+netifaces = [
+    { url = "https://github.com/jumpserver-dev/core-package/releases/download/v1.0.0/netifaces-0.11.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", markers = "sys_platform == 'linux' and platform_machine == 'x86_64'" },
+    { url = "https://github.com/jumpserver-dev/core-package/releases/download/v1.0.0/netifaces-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", markers = "sys_platform == 'linux' and platform_machine == 'aarch64'" },
+    { version = "^0.11.0", source = "aliyun", markers = "sys_platform != 'linux'" }
+]
 daphne = "4.0.0"
-channels = "^4.0.0"
 channels-redis = "4.1.0"
 fido2 = "^1.1.2"
-ua-parser = "^0.18.0"
 user-agents = "^2.2.0"
 django-cors-headers = "^4.3.0"
 mistune = "2.0.3"
 openai = "^1.29.0"
 xlsxwriter = "^3.1.9"
 exchangelib = "^5.1.0"
-xmlsec = "^1.3.13"
-lxml = "5.2.1"
-pydantic = "^2.7.4"
-annotated-types = "^0.6.0"
-httpx = "^0.27.0"
-distro = "1.9.0"
-tqdm = "4.66.4"
 elasticsearch7 = "7.17.9"
 elasticsearch8 = "8.13.2"
 polib = "^1.2.0"
-# psycopg2 = "2.9.6"
 psycopg2-binary = "2.9.6"
 pycountry = "^24.6.1"
+boto = "2.49.0"
+azure-storage-blob = "12.17.0"
+esdk-obs-python = "3.21.4"

 [tool.poetry.group.xpack]
 optional = true
@@ -183,11 +144,9 @@ bce-python-sdk = "0.8.87"
 tencentcloud-sdk-python = "3.0.941"
 aliyun-python-sdk-core-v3 = "2.13.33"
 aliyun-python-sdk-ecs = "4.24.64"
-keystoneauth1 = "5.2.1"
 oracledb = "1.4.0"
 ucloud-sdk-python3 = "0.11.50"
 huaweicloudsdkecs = "3.1.52"
-huaweicloudsdkcore = "3.1.52"
 volcengine-python-sdk = "1.0.71"

 [[tool.poetry.source]]
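The python-ldap, mysqlclient, and netifaces entries above now point at prebuilt manylinux wheels selected via environment markers, with a package-index fallback for non-Linux platforms. As a rough illustration (not Poetry's actual resolver), the markers compare against values the installing interpreter reports:

import platform
import sys

# Show what the environment markers above would compare against on this machine.
print('sys_platform     =', sys.platform)
print('platform_machine =', platform.machine())
print('x86_64 manylinux wheel matches :',
      sys.platform == 'linux' and platform.machine() == 'x86_64')
print('aarch64 manylinux wheel matches:',
      sys.platform == 'linux' and platform.machine() == 'aarch64')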
@@ -18,7 +18,7 @@ for i in $need_clean; do
 done

 # 清理缓存文件
-cd lib_path
+cd ${lib_path} || exit 1
 find . -name "*.pyc" -exec rm -f {} \;

 # 清理不需要的国际化文件