mirror of https://github.com/jumpserver/jumpserver
commit e90e61e8dd

@@ -1,5 +1,4 @@
.git
logs/*
data/*
.github
tmp/*

@@ -6,8 +6,7 @@ labels: 类型:需求
assignees:
- ibuler
- baijiangjie
- wojiushixiaobai
---

**请描述您的需求或者改进建议.**

@@ -21,17 +21,44 @@ jobs:
actions: 'remove-labels'
labels: '状态:待反馈'

add-label-if-not-author:
add-label-if-is-member:
runs-on: ubuntu-latest
if: (github.event.issue.user.id != github.event.comment.user.id) && !github.event.issue.pull_request && (github.event.issue.state == 'open')
steps:
- name: Checkout repository
uses: actions/checkout@v2

- name: Get Organization name
id: org_name
run: echo "data=$(echo '${{ github.repository }}' | cut -d '/' -f 1)" >> $GITHUB_OUTPUT

- name: Get Organization public members
uses: octokit/request-action@v2.x
id: members
with:
route: GET /orgs/${{ steps.org_name.outputs.data }}/public_members
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Process public members data
# 将 members 中的数据转化为 login 字段的拼接字符串
id: member_names
run: echo "data=$(echo '${{ steps.members.outputs.data }}' | jq '[.[].login] | join(",")')" >> $GITHUB_OUTPUT

- run: "echo members: '${{ steps.members.outputs.data }}'"
- run: "echo member names: '${{ steps.member_names.outputs.data }}'"
- run: "echo comment user: '${{ github.event.comment.user.login }}'"
- run: "echo contains? : '${{ contains(steps.member_names.outputs.data, github.event.comment.user.login) }}'"

- name: Add require replay label
if: contains(steps.member_names.outputs.data, github.event.comment.user.login)
uses: actions-cool/issues-helper@v2
with:
actions: 'add-labels'
labels: '状态:待反馈'

- name: Remove require handle label
if: contains(steps.member_names.outputs.data, github.event.comment.user.login)
uses: actions-cool/issues-helper@v2
with:
actions: 'remove-labels'
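The renamed `add-label-if-is-member` job above only re-applies the 状态:待反馈 label when the commenter is not the issue author and appears among the organization's public members. A rough Python sketch of what the jq step and the final `contains()` expression compute; the sample logins are made up for illustration:

```python
# Illustration only: approximates the workflow's jq expression
# '[.[].login] | join(",")' and the subsequent contains() check.
members = [                      # assumed shape of GET /orgs/{org}/public_members
    {"login": "ibuler"},
    {"login": "wojiushixiaobai"},
]

member_names = ",".join(m["login"] for m in members)
print(member_names)              # "ibuler,wojiushixiaobai"

comment_user = "ibuler"
# contains(steps.member_names.outputs.data, github.event.comment.user.login)
# is effectively a substring test against that joined string:
print(comment_user in member_names)  # True
```
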
@@ -35,7 +35,6 @@ celerybeat-schedule.db
docs/_build/
xpack
xpack.bak
logs/*
### Vagrant ###
.vagrant/
release/*

Dockerfile (32 changes)

@@ -1,4 +1,4 @@
FROM python:3.9-slim-buster as stage-build
FROM jumpserver/python:3.9-slim-buster as stage-build
ARG TARGETARCH

ARG VERSION

@@ -8,7 +8,7 @@ WORKDIR /opt/jumpserver
ADD . .
RUN cd utils && bash -ixeu build.sh

FROM python:3.9-slim-buster
FROM jumpserver/python:3.9-slim-buster
ARG TARGETARCH
MAINTAINER JumpServer Team <ibuler@qq.com>

@@ -24,6 +24,7 @@ ARG DEPENDENCIES=" \
libjpeg-dev \
libldap2-dev \
libsasl2-dev \
libssl-dev \
libxml2-dev \
libxmlsec1-dev \
libxmlsec1-openssl \

@@ -66,27 +67,36 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \

ARG DOWNLOAD_URL=https://download.jumpserver.org

RUN mkdir -p /opt/oracle/ \
&& cd /opt/oracle/ \
&& wget ${DOWNLOAD_URL}/public/instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip \
&& unzip instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip \
&& sh -c "echo /opt/oracle/instantclient_19_10 > /etc/ld.so.conf.d/oracle-instantclient.conf" \
&& ldconfig \
&& rm -f instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip
RUN set -ex \
&& \
if [ "${TARGETARCH}" == "amd64" ] || [ "${TARGETARCH}" == "arm64" ]; then \
mkdir -p /opt/oracle; \
cd /opt/oracle; \
wget ${DOWNLOAD_URL}/public/instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip; \
unzip instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip; \
echo "/opt/oracle/instantclient_19_10" > /etc/ld.so.conf.d/oracle-instantclient.conf; \
ldconfig; \
rm -f instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip; \
fi

WORKDIR /tmp/build
COPY ./requirements ./requirements

ARG PIP_MIRROR=https://pypi.douban.com/simple
ENV PIP_MIRROR=$PIP_MIRROR
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

RUN --mount=type=cache,target=/root/.cache/pip \
set -ex \
&& pip config set global.index-url ${PIP_MIRROR} \
&& pip install --upgrade pip \
&& pip install --upgrade setuptools wheel \
&& \
if [ "${TARGETARCH}" == "loong64" ]; then \
pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-38.0.4-cp39-cp39-linux_loongarch64.whl; \
pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp39-cp39-linux_loongarch64.whl; \
pip install https://download.jumpserver.org/pypi/simple/PyNaCl/PyNaCl-1.5.0-cp39-cp39-linux_loongarch64.whl; \
pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.2-cp39-cp39-linux_loongarch64.whl; \
fi \
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
&& pip install -r requirements/requirements.txt

@@ -1,97 +0,0 @@
FROM python:3.9-slim-buster as stage-build
ARG TARGETARCH

ARG VERSION
ENV VERSION=$VERSION

WORKDIR /opt/jumpserver
ADD . .
RUN cd utils && bash -ixeu build.sh

FROM python:3.9-slim-buster
ARG TARGETARCH
MAINTAINER JumpServer Team <ibuler@qq.com>

ARG BUILD_DEPENDENCIES=" \
g++ \
make \
pkg-config"

ARG DEPENDENCIES=" \
freetds-dev \
libpq-dev \
libffi-dev \
libjpeg-dev \
libldap2-dev \
libsasl2-dev \
libssl-dev \
libxml2-dev \
libxmlsec1-dev \
libxmlsec1-openssl \
freerdp2-dev \
libaio-dev"

ARG TOOLS=" \
ca-certificates \
curl \
default-libmysqlclient-dev \
default-mysql-client \
locales \
openssh-client \
procps \
sshpass \
telnet \
unzip \
vim \
git \
wget"

RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
set -ex \
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
&& apt-get update \
&& apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
&& apt-get -y install --no-install-recommends ${TOOLS} \
&& mkdir -p /root/.ssh/ \
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null\n\tCiphers +aes128-cbc\n\tKexAlgorithms +diffie-hellman-group1-sha1\n\tHostKeyAlgorithms +ssh-rsa" > /root/.ssh/config \
&& echo "set mouse-=a" > ~/.vimrc \
&& echo "no" | dpkg-reconfigure dash \
&& echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
&& sed -i "s@# export @export @g" ~/.bashrc \
&& sed -i "s@# alias @alias @g" ~/.bashrc \
&& rm -rf /var/lib/apt/lists/*

WORKDIR /tmp/build
COPY ./requirements ./requirements

ARG PIP_MIRROR=https://pypi.douban.com/simple
ENV PIP_MIRROR=$PIP_MIRROR
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

RUN --mount=type=cache,target=/root/.cache/pip \
set -ex \
&& pip config set global.index-url ${PIP_MIRROR} \
&& pip install --upgrade pip \
&& pip install --upgrade setuptools wheel \
&& pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-38.0.4-cp39-cp39-linux_loongarch64.whl \
&& pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp39-cp39-linux_loongarch64.whl \
&& pip install https://download.jumpserver.org/pypi/simple/PyNaCl/PyNaCl-1.5.0-cp39-cp39-linux_loongarch64.whl \
&& pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.2-cp39-cp39-linux_loongarch64.whl \
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
&& pip install -r requirements/requirements.txt

COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
RUN echo > /opt/jumpserver/config.yml \
&& rm -rf /tmp/build

WORKDIR /opt/jumpserver
VOLUME /opt/jumpserver/data
VOLUME /opt/jumpserver/logs

ENV LANG=zh_CN.UTF-8

EXPOSE 8080

ENTRYPOINT ["./entrypoint.sh"]

README.md (48 changes)

@@ -17,18 +17,16 @@
9 年时间,倾情投入,用心做好一款开源堡垒机。
</p>

| :warning: 注意 :warning: |
|:---------------------------------------------------------------------------------------------------------------------:|
| 3.0 架构上和 2.0 变化较大,建议全新安装一套环境来体验。如需升级,请务必升级前进行备份,并[查阅文档](https://kb.fit2cloud.com/?p=06638d69-f109-4333-b5bf-65b17b297ed9) |
JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。

--------------------------

JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型的资产,包括:
JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型的资产,包括:

- **SSH**: Linux / Unix / 网络设备 等;
- **Windows**: Web 方式连接 / 原生 RDP 连接;
- **数据库**: MySQL / Oracle / SQLServer / PostgreSQL 等;
- **Kubernetes**: 支持连接到 K8s 集群中的 Pods;
- **数据库**: MySQL / MariaDB / PostgreSQL / Oracle / SQLServer / ClickHouse 等;
- **NoSQL**: Redis / MongoDB 等;
- **GPT**: ChatGPT 等;
- **云服务**: Kubernetes / VMware vSphere 等;
- **Web 站点**: 各类系统的 Web 管理后台;
- **应用**: 通过 Remote App 连接各类应用。

@@ -81,11 +79,7 @@ JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运

如果您在使用过程中有任何疑问或对建议,欢迎提交 [GitHub Issue](https://github.com/jumpserver/jumpserver/issues/new/choose)。

您也可以到我们的 [社区论坛](https://bbs.fit2cloud.com/c/js/5) 及微信交流群当中进行交流沟通。

**微信交流群**

<img src="https://download.jumpserver.org/images/wecom-group.jpeg" alt="微信群二维码" width="200"/>
您也可以到我们的 [社区论坛](https://bbs.fit2cloud.com/c/js/5) 当中进行交流沟通。

### 参与贡献

@ -95,15 +89,20 @@ JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运
|
|||
|
||||
## 组件项目
|
||||
|
||||
| 项目 | 状态 | 描述 |
|
||||
|--------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------|
|
||||
| [Lina](https://github.com/jumpserver/lina) | <a href="https://github.com/jumpserver/lina/releases"><img alt="Lina release" src="https://img.shields.io/github/release/jumpserver/lina.svg" /></a> | JumpServer Web UI 项目 |
|
||||
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal 项目 |
|
||||
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer 字符协议 Connector 项目,替代原来 Python 版本的 [Coco](https://github.com/jumpserver/coco) |
|
||||
| [Lion](https://github.com/jumpserver/lion-release) | <a href="https://github.com/jumpserver/lion-release/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion-release.svg" /></a> | JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/) |
|
||||
| [Magnus](https://github.com/jumpserver/magnus-release) | <a href="https://github.com/jumpserver/magnus-release/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/magnus-release.svg" /> | JumpServer 数据库代理 Connector 项目 |
|
||||
| [Clients](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer 客户端 项目 |
|
||||
| [Installer](https://github.com/jumpserver/installer) | <a href="https://github.com/jumpserver/installer/releases"><img alt="Installer release" src="https://img.shields.io/github/release/jumpserver/installer.svg" /> | JumpServer 安装包 项目 |
|
||||
| 项目 | 状态 | 描述 |
|
||||
|--------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------|
|
||||
| [Lina](https://github.com/jumpserver/lina) | <a href="https://github.com/jumpserver/lina/releases"><img alt="Lina release" src="https://img.shields.io/github/release/jumpserver/lina.svg" /></a> | JumpServer Web UI 项目 |
|
||||
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal 项目 |
|
||||
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer 字符协议 Connector 项目 |
|
||||
| [Lion](https://github.com/jumpserver/lion-release) | <a href="https://github.com/jumpserver/lion-release/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion-release.svg" /></a> | JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/) |
|
||||
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer RDP 代理 Connector 项目 |
|
||||
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer 远程应用 Connector 项目 |
|
||||
| [Magnus](https://github.com/jumpserver/magnus-release) | <a href="https://github.com/jumpserver/magnus-release/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/magnus-release.svg" /> | JumpServer 数据库代理 Connector 项目 |
|
||||
| [Chen](https://github.com/jumpserver/chen-release) | <a href="https://github.com/jumpserver/chen-release/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen-release.svg" /> | JumpServer Web DB 项目,替代原来的 OmniDB |
|
||||
| [Kael](https://github.com/jumpserver/kael) | <a href="https://github.com/jumpserver/kael/releases"><img alt="Kael release" src="https://img.shields.io/github/release/jumpserver/kael.svg" /> | JumpServer 连接 GPT 资产的组件项目 |
|
||||
| [Wisp](https://github.com/jumpserver/wisp) | <a href="https://github.com/jumpserver/wisp/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/wisp.svg" /> | JumpServer 各系统终端组件和 Core Api 通信的组件项目 |
|
||||
| [Clients](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer 客户端 项目 |
|
||||
| [Installer](https://github.com/jumpserver/installer) | <a href="https://github.com/jumpserver/installer/releases"><img alt="Installer release" src="https://img.shields.io/github/release/jumpserver/installer.svg" /> | JumpServer 安装包 项目 |
|
||||
|
||||
## 安全说明
|
||||
|
||||
|
@@ -113,11 +112,6 @@ JumpServer是一款安全产品,请参考 [基本安全建议](https://docs.ju
- 邮箱:support@fit2cloud.com
- 电话:400-052-0755

## 致谢开源

- [Apache Guacamole](https://guacamole.apache.org/): Web 页面连接 RDP、SSH、VNC 等协议资产,JumpServer Lion 组件使用到该项目;
- [OmniDB](https://omnidb.org/): Web 页面连接使用数据库,JumpServer Web 数据库组件使用到该项目。

## License & Copyright

Copyright (c) 2014-2023 飞致云 FIT2CLOUD, All rights reserved.

@@ -49,8 +49,7 @@ class AccountTemplateViewSet(OrgBulkModelViewSet):
@action(methods=['get'], detail=False, url_path='su-from-account-templates')
def su_from_account_templates(self, request, *args, **kwargs):
pk = request.query_params.get('template_id')
template = AccountTemplate.objects.filter(pk=pk).first()
templates = AccountTemplate.get_su_from_account_templates(template)
templates = AccountTemplate.get_su_from_account_templates(pk)
templates = self.filter_queryset(templates)
serializer = self.get_serializer(templates, many=True)
return Response(data=serializer.data)

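In the hunk above, the view now passes the raw `template_id` query parameter straight to the model, and (per the model change later in this commit) `get_su_from_account_templates(pk)` excludes both the template itself and any template that already points at it via `su_from`. A minimal, self-contained sketch of that selection rule, with plain dicts standing in for ORM rows (illustrative only, not the actual ORM code):

```python
# Sketch of the exclusion rule behind
# cls.objects.exclude(Q(id=pk) | Q(su_from_id=pk)).
def su_from_candidates(templates, pk=None):
    """Templates that may serve as the su-from of template `pk`."""
    if pk is None:
        return list(templates)
    return [t for t in templates
            if t["id"] != pk and t.get("su_from_id") != pk]

rows = [
    {"id": 1, "su_from_id": None},
    {"id": 2, "su_from_id": 1},   # already switches from template 1
    {"id": 3, "su_from_id": None},
]
print(su_from_candidates(rows, pk=1))  # [{'id': 3, 'su_from_id': None}]
```
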
@ -4,9 +4,58 @@ category: host
|
|||
type:
|
||||
- AIX
|
||||
method: change_secret
|
||||
params:
|
||||
- name: sudo
|
||||
type: str
|
||||
label: 'Sudo'
|
||||
default: '/bin/whoami'
|
||||
help_text: "{{ 'Params sudo help text' | trans }}"
|
||||
|
||||
- name: shell
|
||||
type: str
|
||||
label: 'Shell'
|
||||
default: '/bin/bash'
|
||||
|
||||
- name: home
|
||||
type: str
|
||||
label: "{{ 'Params home label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
AIX account change secret:
|
||||
zh: 使用 Ansible 模块 user 执行账号改密 (DES)
|
||||
ja: Ansible user モジュールを使用してアカウントのパスワード変更 (DES)
|
||||
en: Using Ansible module user to change account secret (DES)
|
||||
zh: '使用 Ansible 模块 user 执行账号改密 (DES)'
|
||||
ja: 'Ansible user モジュールを使用してアカウントのパスワード変更 (DES)'
|
||||
en: 'Using Ansible module user to change account secret (DES)'
|
||||
|
||||
Params sudo help text:
|
||||
zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
|
||||
en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'
|
||||
|
||||
Params home help text:
|
||||
zh: '默认家目录 /home/{账号用户名}'
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params home label:
|
||||
zh: '家目录'
|
||||
ja: 'ホームディレクトリ'
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
|
|
|
@ -4,6 +4,26 @@
|
|||
- name: Test privileged account
|
||||
ansible.builtin.ping:
|
||||
|
||||
- name: Check user
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
shell: "{{ params.shell }}"
|
||||
home: "{{ params.home | default('/home/' + account.username, true) }}"
|
||||
groups: "{{ params.groups }}"
|
||||
expires: -1
|
||||
state: present
|
||||
|
||||
- name: "Add {{ account.username }} group"
|
||||
ansible.builtin.group:
|
||||
name: "{{ account.username }}"
|
||||
state: present
|
||||
|
||||
- name: Add user groups
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
groups: "{{ params.groups }}"
|
||||
when: params.groups
|
||||
|
||||
- name: Change password
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
|
@ -33,6 +53,16 @@
|
|||
exclusive: "{{ ssh_params.exclusive }}"
|
||||
when: account.secret_type == "ssh_key"
|
||||
|
||||
- name: Set sudo setting
|
||||
ansible.builtin.lineinfile:
|
||||
dest: /etc/sudoers
|
||||
state: present
|
||||
regexp: "^{{ account.username }} ALL="
|
||||
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
|
||||
validate: visudo -cf %s
|
||||
when:
|
||||
- params.sudo
|
||||
|
||||
- name: Refresh connection
|
||||
ansible.builtin.meta: reset_connection
|
||||
|
||||
|
|
|
@ -5,9 +5,59 @@ type:
|
|||
- unix
|
||||
- linux
|
||||
method: change_secret
|
||||
params:
|
||||
- name: sudo
|
||||
type: str
|
||||
label: 'Sudo'
|
||||
default: '/bin/whoami'
|
||||
help_text: "{{ 'Params sudo help text' | trans }}"
|
||||
|
||||
- name: shell
|
||||
type: str
|
||||
label: 'Shell'
|
||||
default: '/bin/bash'
|
||||
help_text: ''
|
||||
|
||||
- name: home
|
||||
type: str
|
||||
label: "{{ 'Params home label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: ''
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Posix account change secret:
|
||||
zh: 使用 Ansible 模块 user 执行账号改密 (SHA512)
|
||||
ja: Ansible user モジュールを使用して アカウントのパスワード変更 (SHA512)
|
||||
en: Using Ansible module user to change account secret (SHA512)
|
||||
zh: '使用 Ansible 模块 user 执行账号改密 (SHA512)'
|
||||
ja: 'Ansible user モジュールを使用して アカウントのパスワード変更 (SHA512)'
|
||||
en: 'Using Ansible module user to change account secret (SHA512)'
|
||||
|
||||
Params sudo help text:
|
||||
zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
|
||||
en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'
|
||||
|
||||
Params home help text:
|
||||
zh: '默认家目录 /home/{账号用户名}'
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params home label:
|
||||
zh: '家目录'
|
||||
ja: 'ホームディレクトリ'
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
|
|
|
@ -8,17 +8,13 @@
|
|||
# debug:
|
||||
# msg: "Username: {{ account.username }}, Password: {{ account.secret }}"
|
||||
|
||||
|
||||
- name: Get groups of a Windows user
|
||||
ansible.windows.win_user:
|
||||
name: "{{ jms_account.username }}"
|
||||
register: user_info
|
||||
|
||||
- name: Change password
|
||||
ansible.windows.win_user:
|
||||
fullname: "{{ account.username}}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
groups: "{{ user_info.groups[0].name }}"
|
||||
password_never_expires: yes
|
||||
groups: "{{ params.groups }}"
|
||||
groups_action: add
|
||||
update_password: always
|
||||
ignore_errors: true
|
||||
|
|
|
@ -5,9 +5,22 @@ method: change_secret
|
|||
category: host
|
||||
type:
|
||||
- windows
|
||||
params:
|
||||
- name: groups
|
||||
type: str
|
||||
label: '用户组'
|
||||
default: 'Users,Remote Desktop Users'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
|
||||
i18n:
|
||||
Windows account change secret:
|
||||
zh: 使用 Ansible 模块 win_user 执行 Windows 账号改密
|
||||
ja: Ansible win_user モジュールを使用して Windows アカウントのパスワード変更
|
||||
en: Using Ansible module win_user to change Windows account secret
|
||||
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密'
|
||||
ja: 'Ansible win_user モジュールを使用して Windows アカウントのパスワード変更'
|
||||
en: 'Using Ansible module win_user to change Windows account secret'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
- hosts: custom
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_shell_type: sh
|
||||
ansible_connection: local
|
||||
|
||||
tasks:
|
||||
- name: Verify account
|
||||
ssh_ping:
|
||||
- name: Verify account (pyfreerdp)
|
||||
rdp_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ account.username }}"
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
ansible_connection: local
|
||||
|
||||
tasks:
|
||||
- name: Verify account
|
||||
- name: Verify account (paramiko)
|
||||
ssh_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
|
|
|
@ -7,12 +7,14 @@ class SecretType(TextChoices):
|
|||
SSH_KEY = 'ssh_key', _('SSH key')
|
||||
ACCESS_KEY = 'access_key', _('Access key')
|
||||
TOKEN = 'token', _('Token')
|
||||
API_KEY = 'api_key', _("API key")
|
||||
|
||||
|
||||
class AliasAccount(TextChoices):
|
||||
ALL = '@ALL', _('All')
|
||||
INPUT = '@INPUT', _('Manual input')
|
||||
USER = '@USER', _('Dynamic user')
|
||||
ANON = '@ANON', _('Anonymous account')
|
||||
|
||||
|
||||
class Source(TextChoices):
|
||||
|
|
|
@ -45,7 +45,7 @@ class AccountFilterSet(BaseFilterSet):
|
|||
|
||||
class Meta:
|
||||
model = Account
|
||||
fields = ['id', 'asset_id', 'source_id']
|
||||
fields = ['id', 'asset_id', 'source_id', 'secret_type']
|
||||
|
||||
|
||||
class GatheredAccountFilterSet(BaseFilterSet):
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
# Generated by Django 3.2.14 on 2022-12-28 07:29
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
import simple_history.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import common.db.encoder
|
||||
import common.db.fields
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import simple_history.models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
@ -29,13 +31,16 @@ class Migration(migrations.Migration):
|
|||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('org_id',
|
||||
models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('connectivity', models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity')),
|
||||
('connectivity',
|
||||
models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-',
|
||||
max_length=16, verbose_name='Connectivity')),
|
||||
('date_verified', models.DateTimeField(null=True, verbose_name='Date verified')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('username', models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Username')),
|
||||
('secret_type', models.CharField(
|
||||
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
||||
('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
|
||||
('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
|
||||
verbose_name='Secret type')),
|
||||
('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
|
||||
('privileged', models.BooleanField(default=False, verbose_name='Privileged')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
|
||||
|
@ -61,7 +66,8 @@ class Migration(migrations.Migration):
|
|||
('id', models.UUIDField(db_index=True, default=uuid.uuid4)),
|
||||
('secret_type', models.CharField(
|
||||
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
||||
('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
|
||||
('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
|
||||
verbose_name='Secret type')),
|
||||
('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
|
||||
('version', models.IntegerField(default=0, verbose_name='Version')),
|
||||
('history_id', models.AutoField(primary_key=True, serialize=False)),
|
||||
|
@ -96,7 +102,8 @@ class Migration(migrations.Migration):
|
|||
('username', models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Username')),
|
||||
('secret_type', models.CharField(
|
||||
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
||||
('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
|
||||
('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
|
||||
verbose_name='Secret type')),
|
||||
('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
|
||||
('privileged', models.BooleanField(default=False, verbose_name='Privileged')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
|
||||
|
|
|
@ -1,11 +1,13 @@
|
|||
# Generated by Django 3.2.16 on 2022-12-30 08:08
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import common.db.encoder
|
||||
import common.db.fields
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
@ -53,7 +55,8 @@ class Migration(migrations.Migration):
|
|||
primary_key=True, serialize=False, to='assets.baseautomation')),
|
||||
('secret_type', models.CharField(
|
||||
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
||||
('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
|
||||
('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
|
||||
verbose_name='Secret type')),
|
||||
('secret_strategy', models.CharField(choices=[('specific', 'Specific password'),
|
||||
('random_one', 'All assets use the same random password'),
|
||||
('random_all',
|
||||
|
@ -156,7 +159,8 @@ class Migration(migrations.Migration):
|
|||
primary_key=True, serialize=False, to='assets.baseautomation')),
|
||||
('secret_type', models.CharField(
|
||||
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
||||
('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
|
||||
('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
|
||||
verbose_name='Secret type')),
|
||||
('secret_strategy', models.CharField(choices=[('specific', 'Specific password'),
|
||||
('random_one', 'All assets use the same random password'),
|
||||
('random_all',
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
from .base import *
|
||||
from .account import *
|
||||
from .automations import *
|
||||
from .base import *
|
||||
|
|
|
@ -88,20 +88,33 @@ class Account(AbsConnectivity, BaseAccount):
|
|||
def has_secret(self):
|
||||
return bool(self.secret)
|
||||
|
||||
@classmethod
|
||||
def get_special_account(cls, name):
|
||||
if name == AliasAccount.INPUT.value:
|
||||
return cls.get_manual_account()
|
||||
elif name == AliasAccount.ANON.value:
|
||||
return cls.get_anonymous_account()
|
||||
else:
|
||||
return cls(name=name, username=name, secret=None)
|
||||
|
||||
@classmethod
|
||||
def get_manual_account(cls):
|
||||
""" @INPUT 手动登录的账号(any) """
|
||||
return cls(name=AliasAccount.INPUT.label, username=AliasAccount.INPUT.value, secret=None)
|
||||
|
||||
@lazyproperty
|
||||
def versions(self):
|
||||
return self.history.count()
|
||||
@classmethod
|
||||
def get_anonymous_account(cls):
|
||||
return cls(name=AliasAccount.ANON.label, username=AliasAccount.ANON.value, secret=None)
|
||||
|
||||
@classmethod
|
||||
def get_user_account(cls):
|
||||
""" @USER 动态用户的账号(self) """
|
||||
return cls(name=AliasAccount.USER.label, username=AliasAccount.USER.value, secret=None)
|
||||
|
||||
@lazyproperty
|
||||
def versions(self):
|
||||
return self.history.count()
|
||||
|
||||
def get_su_from_accounts(self):
|
||||
""" 排除自己和以自己为 su-from 的账号 """
|
||||
return self.asset.accounts.exclude(id=self.id).exclude(su_from=self)
|
||||
|
@ -124,10 +137,13 @@ class AccountTemplate(BaseAccount):
|
|||
]
|
||||
|
||||
@classmethod
|
||||
def get_su_from_account_templates(cls, instance=None):
|
||||
if not instance:
|
||||
def get_su_from_account_templates(cls, pk=None):
|
||||
if pk is None:
|
||||
return cls.objects.all()
|
||||
return cls.objects.exclude(Q(id=instance.id) | Q(su_from=instance))
|
||||
return cls.objects.exclude(Q(id=pk) | Q(su_from_id=pk))
|
||||
|
||||
def __str__(self):
|
||||
return f'{self.name}({self.username})'
|
||||
|
||||
def get_su_from_account(self, asset):
|
||||
su_from = self.su_from
|
||||
|
|
|
@ -78,5 +78,8 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
|
|||
]
|
||||
extra_kwargs = {
|
||||
'spec_info': {'label': _('Spec info')},
|
||||
'username': {'help_text': _("Tip: If no username is required for authentication, fill in `null`")}
|
||||
'username': {'help_text': _(
|
||||
"Tip: If no username is required for authentication, fill in `null`, "
|
||||
"If AD account, like `username@domain`"
|
||||
)},
|
||||
}
|
||||
|
|
|
@@ -63,15 +63,17 @@ class AutomationExecutionSerializer(serializers.ModelSerializer):

@staticmethod
def get_snapshot(obj):
tp = obj.snapshot['type']
tp = obj.snapshot.get('type', '')
type_display = tp if not hasattr(AutomationTypes, tp) \
else getattr(AutomationTypes, tp).label
snapshot = {
'type': tp,
'name': obj.snapshot['name'],
'comment': obj.snapshot['comment'],
'accounts': obj.snapshot['accounts'],
'node_amount': len(obj.snapshot['nodes']),
'asset_amount': len(obj.snapshot['assets']),
'type_display': getattr(AutomationTypes, tp).label,
'name': obj.snapshot.get('name'),
'comment': obj.snapshot.get('comment'),
'accounts': obj.snapshot.get('accounts'),
'node_amount': len(obj.snapshot.get('nodes', [])),
'asset_amount': len(obj.snapshot.get('assets', [])),
'type_display': type_display,
}
return snapshot

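The hunk above replaces direct `snapshot[...]` indexing with `.get()` lookups and a guarded label lookup, so executions whose stored snapshot is missing keys, or carries a type with no `AutomationTypes` entry, serialize instead of raising `KeyError`. A simplified, framework-free sketch of the same pattern (function and variable names assumed for illustration):

```python
# Sketch of the defensive lookup pattern the serializer now uses.
def summarize_snapshot(snapshot, type_labels):
    tp = snapshot.get('type', '')
    # Fall back to the raw value when the type has no registered label.
    type_display = type_labels.get(tp, tp)
    return {
        'type': tp,
        'type_display': type_display,
        'name': snapshot.get('name'),
        'node_amount': len(snapshot.get('nodes', [])),
        'asset_amount': len(snapshot.get('assets', [])),
    }

# Older executions may have sparse snapshots; these no longer raise KeyError:
print(summarize_snapshot({}, {'change_secret': 'Change secret'}))
print(summarize_snapshot({'type': 'change_secret', 'assets': [1, 2]},
                         {'change_secret': 'Change secret'}))
```
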
@ -50,7 +50,7 @@ class ChangeSecretAutomationSerializer(AuthValidateMixin, BaseAutomationSerializ
|
|||
read_only_fields = BaseAutomationSerializer.Meta.read_only_fields
|
||||
fields = BaseAutomationSerializer.Meta.fields + read_only_fields + [
|
||||
'secret_type', 'secret_strategy', 'secret', 'password_rules',
|
||||
'ssh_key_change_strategy', 'passphrase', 'recipients',
|
||||
'ssh_key_change_strategy', 'passphrase', 'recipients', 'params'
|
||||
]
|
||||
extra_kwargs = {**BaseAutomationSerializer.Meta.extra_kwargs, **{
|
||||
'accounts': {'required': True},
|
||||
|
|
|
@ -10,7 +10,7 @@ class PushAccountAutomationSerializer(ChangeSecretAutomationSerializer):
|
|||
|
||||
class Meta(ChangeSecretAutomationSerializer.Meta):
|
||||
model = PushAccountAutomation
|
||||
fields = ['params'] + [
|
||||
fields = [
|
||||
n for n in ChangeSecretAutomationSerializer.Meta.fields
|
||||
if n not in ['recipients']
|
||||
]
|
||||
|
|
|
@ -39,7 +39,7 @@ urlpatterns = [
|
|||
|
||||
path('push-account/<uuid:pk>/asset/remove/', api.PushAccountRemoveAssetApi.as_view(),
|
||||
name='push-account-remove-asset'),
|
||||
path('push-accountt/<uuid:pk>/asset/add/', api.PushAccountAddAssetApi.as_view(), name='push-account-add-asset'),
|
||||
path('push-account/<uuid:pk>/asset/add/', api.PushAccountAddAssetApi.as_view(), name='push-account-add-asset'),
|
||||
path('push-account/<uuid:pk>/nodes/', api.PushAccountNodeAddRemoveApi.as_view(),
|
||||
name='push-account-add-or-remove-node'),
|
||||
path('push-account/<uuid:pk>/assets/', api.PushAccountAssetsListApi.as_view(), name='push-account-assets'),
|
||||
|
|
|
@ -4,7 +4,7 @@ from rest_framework import serializers
|
|||
from accounts.const import (
|
||||
SecretType, DEFAULT_PASSWORD_RULES
|
||||
)
|
||||
from common.utils import gen_key_pair, random_string
|
||||
from common.utils import ssh_key_gen, random_string
|
||||
from common.utils import validate_ssh_private_key, parse_ssh_private_key_str
|
||||
|
||||
|
||||
|
@ -16,7 +16,7 @@ class SecretGenerator:
|
|||
|
||||
@staticmethod
|
||||
def generate_ssh_key():
|
||||
private_key, public_key = gen_key_pair()
|
||||
private_key, public_key = ssh_key_gen()
|
||||
return private_key
|
||||
|
||||
def generate_password(self):
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
class ActionChoices(models.TextChoices):
|
||||
reject = 'reject', _('Reject')
|
||||
accept = 'accept', _('Accept')
|
||||
review = 'review', _('Review')
|
||||
warning = 'warning', _('Warning')
|
|
@ -1,5 +1,4 @@
|
|||
# Generated by Django 3.2.17 on 2023-06-06 10:57
|
||||
from collections import defaultdict
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
@ -8,17 +7,20 @@ import common.db.fields
|
|||
|
||||
def migrate_users_login_acls(apps, schema_editor):
|
||||
login_acl_model = apps.get_model('acls', 'LoginACL')
|
||||
name_used = defaultdict(int)
|
||||
|
||||
for login_acl in login_acl_model.objects.all():
|
||||
name = login_acl.name
|
||||
if name_used[name] > 0:
|
||||
login_acl.name += "_{}".format(name_used[name])
|
||||
name_used[name] += 1
|
||||
name_used = []
|
||||
login_acls = []
|
||||
for login_acl in login_acl_model.objects.all().select_related('user'):
|
||||
name = '{}_{}'.format(login_acl.name, login_acl.user.username)
|
||||
if name.lower() in name_used:
|
||||
name += '_{}'.format(str(login_acl.user_id)[:4])
|
||||
name_used.append(name.lower())
|
||||
login_acl.name = name
|
||||
login_acl.users = {
|
||||
"type": "ids", "ids": [str(login_acl.user_id)]
|
||||
}
|
||||
login_acl.save()
|
||||
login_acls.append(login_acl)
|
||||
login_acl_model.objects.bulk_update(login_acls, ['name', 'users'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
|
|
@ -7,6 +7,7 @@ from common.db.models import JMSBaseModel
|
|||
from common.utils import contains_ip
|
||||
from common.utils.time_period import contains_time_period
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from ..const import ActionChoices
|
||||
|
||||
__all__ = [
|
||||
'BaseACL', 'UserBaseACL', 'UserAssetAccountBaseACL',
|
||||
|
@ -16,12 +17,6 @@ from orgs.utils import tmp_to_root_org
|
|||
from orgs.utils import tmp_to_org
|
||||
|
||||
|
||||
class ActionChoices(models.TextChoices):
|
||||
reject = 'reject', _('Reject')
|
||||
accept = 'accept', _('Accept')
|
||||
review = 'review', _('Review')
|
||||
|
||||
|
||||
class BaseACLQuerySet(models.QuerySet):
|
||||
def active(self):
|
||||
return self.filter(is_active=True)
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from acls.models.base import ActionChoices, BaseACL
|
||||
from acls.models.base import BaseACL
|
||||
from common.serializers.fields import JSONManyToManyField, LabeledChoiceField
|
||||
from jumpserver.utils import has_valid_xpack_license
|
||||
from orgs.models import Organization
|
||||
from ..const import ActionChoices
|
||||
|
||||
common_help_text = _(
|
||||
"With * indicating a match all. "
|
||||
|
@ -60,18 +61,21 @@ class ActionAclSerializer(serializers.Serializer):
|
|||
super().__init__(*args, **kwargs)
|
||||
self.set_action_choices()
|
||||
|
||||
def set_action_choices(self):
|
||||
action = self.fields.get("action")
|
||||
if not action:
|
||||
return
|
||||
choices = action.choices
|
||||
if not has_valid_xpack_license():
|
||||
choices.pop(ActionChoices.review, None)
|
||||
action._choices = choices
|
||||
|
||||
|
||||
class BaserACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
class Meta:
|
||||
action_choices_exclude = [ActionChoices.warning]
|
||||
|
||||
def set_action_choices(self):
|
||||
field_action = self.fields.get("action")
|
||||
if not field_action:
|
||||
return
|
||||
if not has_valid_xpack_license():
|
||||
field_action._choices.pop(ActionChoices.review, None)
|
||||
for choice in self.Meta.action_choices_exclude:
|
||||
field_action._choices.pop(choice, None)
|
||||
|
||||
|
||||
class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
class Meta(ActionAclSerializer.Meta):
|
||||
model = BaseACL
|
||||
fields_mini = ["id", "name"]
|
||||
fields_small = fields_mini + [
|
||||
|
@ -84,6 +88,7 @@ class BaserACLSerializer(ActionAclSerializer, serializers.Serializer):
|
|||
extra_kwargs = {
|
||||
"priority": {"default": 50},
|
||||
"is_active": {"default": True},
|
||||
'reviewers': {'label': _('Recipients')},
|
||||
}
|
||||
|
||||
def validate_reviewers(self, reviewers):
|
||||
|
@ -107,16 +112,16 @@ class BaserACLSerializer(ActionAclSerializer, serializers.Serializer):
|
|||
return valid_reviewers
|
||||
|
||||
|
||||
class BaserUserACLSerializer(BaserACLSerializer):
|
||||
class BaseUserACLSerializer(BaseACLSerializer):
|
||||
users = JSONManyToManyField(label=_('User'))
|
||||
|
||||
class Meta(BaserACLSerializer.Meta):
|
||||
fields = BaserACLSerializer.Meta.fields + ['users']
|
||||
class Meta(BaseACLSerializer.Meta):
|
||||
fields = BaseACLSerializer.Meta.fields + ['users']
|
||||
|
||||
|
||||
class BaseUserAssetAccountACLSerializer(BaserUserACLSerializer):
|
||||
class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
|
||||
assets = JSONManyToManyField(label=_('Asset'))
|
||||
accounts = serializers.ListField(label=_('Account'))
|
||||
|
||||
class Meta(BaserUserACLSerializer.Meta):
|
||||
fields = BaserUserACLSerializer.Meta.fields + ['assets', 'accounts']
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts']
|
||||
|
|
|
@ -31,6 +31,8 @@ class CommandFilterACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
|
|||
class Meta(BaseSerializer.Meta):
|
||||
model = CommandFilterACL
|
||||
fields = BaseSerializer.Meta.fields + ['command_groups']
|
||||
# 默认都支持所有的 actions
|
||||
action_choices_exclude = []
|
||||
|
||||
|
||||
class CommandReviewSerializer(serializers.Serializer):
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
|
||||
from ..models import ConnectMethodACL
|
||||
from ..const import ActionChoices
|
||||
|
||||
__all__ = ["ConnectMethodACLSerializer"]
|
||||
|
||||
|
@ -12,12 +13,6 @@ class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
|
|||
i for i in BaseSerializer.Meta.fields + ['connect_methods']
|
||||
if i not in ['assets', 'accounts']
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
field_action = self.fields.get('action')
|
||||
if not field_action:
|
||||
return
|
||||
# 仅支持拒绝
|
||||
for k in ['review', 'accept']:
|
||||
field_action._choices.pop(k, None)
|
||||
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
|
||||
ActionChoices.review, ActionChoices.accept
|
||||
]
|
||||
|
|
|
@ -2,7 +2,7 @@ from django.utils.translation import ugettext as _
|
|||
|
||||
from common.serializers import MethodSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaserUserACLSerializer
|
||||
from .base import BaseUserACLSerializer
|
||||
from .rules import RuleSerializer
|
||||
from ..models import LoginACL
|
||||
|
||||
|
@ -11,12 +11,12 @@ __all__ = ["LoginACLSerializer"]
|
|||
common_help_text = _("With * indicating a match all. ")
|
||||
|
||||
|
||||
class LoginACLSerializer(BaserUserACLSerializer, BulkOrgResourceModelSerializer):
|
||||
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
|
||||
rules = MethodSerializer(label=_('Rule'))
|
||||
|
||||
class Meta(BaserUserACLSerializer.Meta):
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
model = LoginACL
|
||||
fields = BaserUserACLSerializer.Meta.fields + ['rules', ]
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
|
||||
|
||||
def get_rules_serializer(self):
|
||||
return RuleSerializer()
|
||||
|
|
|
@ -3,6 +3,7 @@ from .cloud import *
|
|||
from .custom import *
|
||||
from .database import *
|
||||
from .device import *
|
||||
from .gpt import *
|
||||
from .host import *
|
||||
from .permission import *
|
||||
from .web import *
|
||||
|
|
|
@ -82,7 +82,7 @@ class AssetFilterSet(BaseFilterSet):
|
|||
@staticmethod
|
||||
def filter_protocols(queryset, name, value):
|
||||
value = value.split(',')
|
||||
return queryset.filter(protocols__name__in=value)
|
||||
return queryset.filter(protocols__name__in=value).distinct()
|
||||
|
||||
@staticmethod
|
||||
def filter_labels(queryset, name, value):
|
||||
|
@ -91,7 +91,7 @@ class AssetFilterSet(BaseFilterSet):
|
|||
queryset = queryset.filter(labels__name=n, labels__value=v)
|
||||
else:
|
||||
q = Q(labels__name__contains=value) | Q(labels__value__contains=value)
|
||||
queryset = queryset.filter(q)
|
||||
queryset = queryset.filter(q).distinct()
|
||||
return queryset
|
||||
|
||||
|
||||
|
@ -121,6 +121,14 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
NodeFilterBackend, AttrRulesFilterBackend
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset() \
|
||||
.prefetch_related('nodes', 'protocols') \
|
||||
.select_related('platform', 'domain')
|
||||
if queryset.model is not Asset:
|
||||
queryset = queryset.select_related('asset_ptr')
|
||||
return queryset
|
||||
|
||||
def get_serializer_class(self):
|
||||
cls = super().get_serializer_class()
|
||||
if self.action == "retrieve":
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
from assets.models import GPT, Asset
|
||||
from assets.serializers import GPTSerializer
|
||||
|
||||
from .asset import AssetViewSet
|
||||
|
||||
__all__ = ['GPTViewSet']
|
||||
|
||||
|
||||
class GPTViewSet(AssetViewSet):
|
||||
model = GPT
|
||||
perm_model = Asset
|
||||
|
||||
def get_serializer_classes(self):
|
||||
serializer_classes = super().get_serializer_classes()
|
||||
serializer_classes['default'] = GPTSerializer
|
||||
return serializer_classes
|
|
@ -1,11 +1,11 @@
|
|||
from rest_framework.mixins import ListModelMixin
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.mixins import ListModelMixin
|
||||
from rest_framework.response import Response
|
||||
|
||||
from assets.const import AllTypes
|
||||
from assets.serializers import CategorySerializer, TypeSerializer
|
||||
from common.api import JMSGenericViewSet
|
||||
from common.permissions import IsValidUser
|
||||
from assets.serializers import CategorySerializer, TypeSerializer
|
||||
from assets.const import AllTypes
|
||||
|
||||
__all__ = ['CategoryViewSet']
|
||||
|
||||
|
@ -32,4 +32,3 @@ class CategoryViewSet(ListModelMixin, JMSGenericViewSet):
|
|||
tp = request.query_params.get('type')
|
||||
constraints = AllTypes.get_constraints(category, tp)
|
||||
return Response(constraints)
|
||||
|
||||
|
|
|
@ -26,6 +26,8 @@ class DomainViewSet(OrgBulkModelViewSet):
|
|||
return serializers.DomainWithGatewaySerializer
|
||||
return serializers.DomainSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
return super().get_queryset().prefetch_related('assets')
|
||||
|
||||
class GatewayViewSet(HostViewSet):
|
||||
perm_model = Gateway
|
||||
|
|
|
@ -38,5 +38,6 @@ class LabelViewSet(OrgBulkModelViewSet):
|
|||
return super().list(request, *args, **kwargs)
|
||||
|
||||
def get_queryset(self):
|
||||
self.queryset = Label.objects.annotate(asset_count=Count("assets"))
|
||||
self.queryset = Label.objects.prefetch_related(
|
||||
'assets').annotate(asset_count=Count("assets"))
|
||||
return self.queryset
|
||||
|
|
|
@ -4,20 +4,20 @@ from rest_framework.decorators import action
|
|||
from rest_framework.response import Response
|
||||
|
||||
from assets.const import AllTypes
|
||||
from assets.models import Platform, Node, Asset
|
||||
from assets.serializers import PlatformSerializer
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol
|
||||
from assets.serializers import PlatformSerializer, PlatformProtocolSerializer
|
||||
from common.api import JMSModelViewSet
|
||||
from common.permissions import IsValidUser
|
||||
from common.serializers import GroupedChoiceSerializer
|
||||
|
||||
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi']
|
||||
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']
|
||||
|
||||
|
||||
class AssetPlatformViewSet(JMSModelViewSet):
|
||||
queryset = Platform.objects.all()
|
||||
serializer_classes = {
|
||||
'default': PlatformSerializer,
|
||||
'categories': GroupedChoiceSerializer
|
||||
'categories': GroupedChoiceSerializer,
|
||||
}
|
||||
filterset_fields = ['name', 'category', 'type']
|
||||
search_fields = ['name']
|
||||
|
@ -25,7 +25,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
|||
'categories': 'assets.view_platform',
|
||||
'type_constraints': 'assets.view_platform',
|
||||
'ops_methods': 'assets.view_platform',
|
||||
'filter_nodes_assets': 'assets.view_platform'
|
||||
'filter_nodes_assets': 'assets.view_platform',
|
||||
}
|
||||
|
||||
def get_queryset(self):
|
||||
|
@ -61,6 +61,15 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
|||
return Response(serializer.data)
|
||||
|
||||
|
||||
class PlatformProtocolViewSet(JMSModelViewSet):
|
||||
queryset = PlatformProtocol.objects.all()
|
||||
serializer_class = PlatformProtocolSerializer
|
||||
filterset_fields = ['name', 'platform__name']
|
||||
rbac_perms = {
|
||||
'*': 'assets.add_platform'
|
||||
}
|
||||
|
||||
|
||||
class PlatformAutomationMethodsApi(generics.ListAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
|
||||
|
|
|
@ -127,10 +127,13 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
|
|||
if not self.instance or not include_assets:
|
||||
return Asset.objects.none()
|
||||
if query_all:
|
||||
assets = self.instance.get_all_assets_for_tree()
|
||||
assets = self.instance.get_all_assets()
|
||||
else:
|
||||
assets = self.instance.get_assets_for_tree()
|
||||
return assets
|
||||
assets = self.instance.get_assets()
|
||||
return assets.only(
|
||||
"id", "name", "address", "platform_id",
|
||||
"org_id", "is_active", 'comment'
|
||||
).prefetch_related('platform')
|
||||
|
||||
def filter_queryset_for_assets(self, assets):
|
||||
search = self.request.query_params.get('search')
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
- hosts: custom
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_shell_type: sh
|
||||
ansible_connection: local
|
||||
|
||||
tasks:
|
||||
- name: Test asset connection (pyfreerdp)
|
||||
rdp_ping:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_secret_type: "{{ jms_account.secret_type }}"
|
||||
login_private_key_path: "{{ jms_account.private_key_path }}"
|
|
@ -0,0 +1,13 @@
|
|||
id: ping_by_rdp
|
||||
name: "{{ 'Ping by pyfreerdp' | trans }}"
|
||||
category:
|
||||
- device
|
||||
- host
|
||||
type:
|
||||
- windows
|
||||
method: ping
|
||||
i18n:
|
||||
Ping by pyfreerdp:
|
||||
zh: 使用 Python 模块 pyfreerdp 测试主机可连接性
|
||||
en: Ping by pyfreerdp module
|
||||
ja: Pyfreerdpモジュールを使用してホストにPingする
|
|
@ -4,7 +4,7 @@
|
|||
ansible_connection: local
|
||||
|
||||
tasks:
|
||||
- name: Test asset connection
|
||||
- name: Test asset connection (paramiko)
|
||||
ssh_ping:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
|
@ -1,7 +1,8 @@
|
|||
from django.db import models
|
||||
from django.db.models import TextChoices
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from jumpserver.utils import has_valid_xpack_license
|
||||
from .protocol import Protocol
|
||||
|
||||
|
||||
class Type:
|
||||
|
@ -28,6 +29,12 @@ class Type:
|
|||
)
|
||||
|
||||
|
||||
class FillType(models.TextChoices):
|
||||
no = 'no', _('Disabled')
|
||||
basic = 'basic', _('Basic')
|
||||
script = 'script', _('Script')
|
||||
|
||||
|
||||
class BaseType(TextChoices):
|
||||
"""
|
||||
约束应该考虑代是对平台对限制,避免多余对选项,如: mysql 开启 ssh,
|
||||
|
@ -49,7 +56,7 @@ class BaseType(TextChoices):
|
|||
for k, v in cls.get_choices():
|
||||
tp_base = {**base_default, **base.get(k, {})}
|
||||
tp_auto = {**automation_default, **automation.get(k, {})}
|
||||
tp_protocols = {**protocols_default, **protocols.get(k, {})}
|
||||
tp_protocols = {**protocols_default, **{'port_from_addr': False}, **protocols.get(k, {})}
|
||||
tp_protocols = cls._parse_protocols(tp_protocols, k)
|
||||
tp_constrains = {**tp_base, 'protocols': tp_protocols, 'automation': tp_auto}
|
||||
constrains[k] = tp_constrains
|
||||
|
@ -57,14 +64,20 @@ class BaseType(TextChoices):
|
|||
|
||||
@classmethod
|
||||
def _parse_protocols(cls, protocol, tp):
|
||||
from .protocol import Protocol
|
||||
settings = Protocol.settings()
|
||||
choices = protocol.get('choices', [])
|
||||
if choices == '__self__':
|
||||
choices = [tp]
|
||||
protocols = [
|
||||
{'name': name, **settings.get(name, {})}
|
||||
for name in choices
|
||||
]
|
||||
|
||||
protocols = []
|
||||
for name in choices:
|
||||
protocol = {'name': name, **settings.get(name, {})}
|
||||
setting = protocol.pop('setting', {})
|
||||
setting_values = {k: v.get('default', None) for k, v in setting.items()}
|
||||
protocol['setting'] = setting_values
|
||||
protocols.append(protocol)
|
||||
|
||||
if protocols:
|
||||
protocols[0]['default'] = True
|
||||
return protocols
|
||||
|
|
|
@ -12,6 +12,7 @@ class Category(ChoicesMixin, models.TextChoices):
|
|||
DATABASE = 'database', _("Database")
|
||||
CLOUD = 'cloud', _("Cloud service")
|
||||
WEB = 'web', _("Web")
|
||||
GPT = 'gpt', "GPT"
|
||||
CUSTOM = 'custom', _("Custom type")
|
||||
|
||||
@classmethod
|
||||
|
|
|
@ -1,3 +1,6 @@
|
|||
from collections import defaultdict
|
||||
|
||||
from common.decorators import cached_method
|
||||
from .base import BaseType
|
||||
|
||||
|
||||
|
@ -9,7 +12,8 @@ class CustomTypes(BaseType):
|
|||
except Exception:
|
||||
return []
|
||||
types = set([p.type for p in platforms])
|
||||
return [(t, t) for t in types]
|
||||
choices = [(t, t) for t in types]
|
||||
return choices
|
||||
|
||||
@classmethod
|
||||
def _get_base_constrains(cls) -> dict:
|
||||
|
@ -37,13 +41,20 @@ class CustomTypes(BaseType):
|
|||
return constrains
|
||||
|
||||
@classmethod
|
||||
@cached_method(5)
|
||||
def _get_protocol_constrains(cls) -> dict:
|
||||
constrains = {}
|
||||
for platform in cls.get_custom_platforms():
|
||||
choices = list(platform.protocols.values_list('name', flat=True))
|
||||
if platform.type in constrains:
|
||||
choices = constrains[platform.type]['choices'] + choices
|
||||
constrains[platform.type] = {'choices': choices}
|
||||
from assets.models import PlatformProtocol
|
||||
_constrains = defaultdict(set)
|
||||
protocols = PlatformProtocol.objects \
|
||||
.filter(platform__category='custom') \
|
||||
.values_list('name', 'platform__type')
|
||||
for name, tp in protocols:
|
||||
_constrains[tp].add(name)
|
||||
|
||||
constrains = {
|
||||
tp: {'choices': list(choices)}
|
||||
for tp, choices in _constrains.items()
|
||||
}
|
||||
return constrains
|
||||
|
||||
@classmethod
|
||||
|
@ -51,6 +62,8 @@ class CustomTypes(BaseType):
|
|||
return {}
|
||||
|
||||
@classmethod
|
||||
@cached_method(5)
|
||||
def get_custom_platforms(cls):
|
||||
from assets.models import Platform
|
||||
return Platform.objects.filter(category='custom')
|
||||
platforms = Platform.objects.filter(category='custom')
|
||||
return platforms
|
||||
|
|
|
@ -0,0 +1,54 @@
from django.utils.translation import gettext_lazy as _

from .base import BaseType


class GPTTypes(BaseType):
    CHATGPT = 'chatgpt', _('ChatGPT')

    @classmethod
    def _get_base_constrains(cls) -> dict:
        return {
            '*': {
                'charset_enabled': False,
                'domain_enabled': False,
                'su_enabled': False,
            }
        }

    @classmethod
    def _get_automation_constrains(cls) -> dict:
        constrains = {
            '*': {
                'ansible_enabled': False,
                'ping_enabled': False,
                'gather_facts_enabled': False,
                'verify_account_enabled': False,
                'change_secret_enabled': False,
                'push_account_enabled': False,
                'gather_accounts_enabled': False,
            }
        }
        return constrains

    @classmethod
    def _get_protocol_constrains(cls) -> dict:
        return {
            '*': {
                'choices': '__self__',
            }
        }

    @classmethod
    def internal_platforms(cls):
        return {
            cls.CHATGPT: [
                {'name': 'ChatGPT'}
            ],
        }

    @classmethod
    def get_community_types(cls):
        return [
            cls.CHATGPT,
        ]
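As a rough illustration (the merge itself happens in BaseType/AllTypes, so the exact output shape here is an assumption), the chatgpt type ends up with constraints along these lines:

# Hypothetical merged constraints for the 'chatgpt' type; keys follow the dicts above.
base = {'charset_enabled': False, 'domain_enabled': False, 'su_enabled': False}
automation = {'ansible_enabled': False, 'ping_enabled': False, 'gather_facts_enabled': False}
protocols = [{'name': 'chatgpt', 'port': 443, 'setting': {'api_mode': 'gpt-3.5-turbo'}, 'default': True}]
chatgpt_constraints = {**base, 'automation': automation, 'protocols': protocols}
print(chatgpt_constraints['protocols'][0]['name'])  # chatgpt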
@ -1,6 +1,10 @@
|
|||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.db.models import ChoicesMixin
|
||||
from common.decorators import cached_method
|
||||
from .base import FillType
|
||||
|
||||
__all__ = ['Protocol']
|
||||
|
||||
|
@ -22,8 +26,9 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
mongodb = 'mongodb', 'MongoDB'
|
||||
|
||||
k8s = 'k8s', 'K8S'
|
||||
http = 'http', 'HTTP'
|
||||
_settings = None
|
||||
http = 'http', 'HTTP(s)'
|
||||
|
||||
chatgpt = 'chatgpt', 'ChatGPT'
|
||||
|
||||
@classmethod
|
||||
def device_protocols(cls):
|
||||
|
@ -32,16 +37,41 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
'port': 22,
|
||||
'secret_types': ['password', 'ssh_key'],
|
||||
'setting': {
|
||||
'sftp_enabled': True,
|
||||
'sftp_home': '/tmp',
|
||||
'sftp_enabled': {
|
||||
'type': 'bool',
|
||||
'default': True,
|
||||
'label': _('SFTP enabled')
|
||||
},
|
||||
'sftp_home': {
|
||||
'type': 'str',
|
||||
'default': '/tmp',
|
||||
'label': _('SFTP home')
|
||||
},
|
||||
}
|
||||
},
|
||||
cls.rdp: {
|
||||
'port': 3389,
|
||||
'secret_types': ['password'],
|
||||
'setting': {
|
||||
'console': False,
|
||||
'security': 'any',
|
||||
'console': {
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'label': _('Console'),
|
||||
'help_text': _("Connect to console session")
|
||||
},
|
||||
'security': {
|
||||
'type': 'choice',
|
||||
'choices': [('any', _('Any')), ('rdp', 'RDP'), ('tls', 'TLS'), ('nla', 'NLA')],
|
||||
'default': 'any',
|
||||
'label': _('Security'),
|
||||
'help_text': _("Security layer to use for the connection")
|
||||
},
|
||||
'ad_domain': {
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'default': '',
|
||||
'label': _('AD domain')
|
||||
}
|
||||
}
|
||||
},
|
||||
cls.vnc: {
|
||||
|
@ -56,7 +86,11 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
'port': 5985,
|
||||
'secret_types': ['password'],
|
||||
'setting': {
|
||||
'use_ssl': False,
|
||||
'use_ssl': {
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'label': _('Use SSL')
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@ -79,21 +113,25 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
'port': 5432,
|
||||
'required': True,
|
||||
'secret_types': ['password'],
|
||||
'xpack': True
|
||||
},
|
||||
cls.oracle: {
|
||||
'port': 1521,
|
||||
'required': True,
|
||||
'secret_types': ['password'],
|
||||
'xpack': True
|
||||
},
|
||||
cls.sqlserver: {
|
||||
'port': 1433,
|
||||
'required': True,
|
||||
'secret_types': ['password'],
|
||||
'xpack': True,
|
||||
},
|
||||
cls.clickhouse: {
|
||||
'port': 9000,
|
||||
'required': True,
|
||||
'secret_types': ['password'],
|
||||
'xpack': True,
|
||||
},
|
||||
cls.mongodb: {
|
||||
'port': 27017,
|
||||
|
@ -105,7 +143,11 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
'required': True,
|
||||
'secret_types': ['password'],
|
||||
'setting': {
|
||||
'auth_username': True,
|
||||
'auth_username': {
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'label': _('Auth username')
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@ -115,32 +157,97 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
return {
|
||||
cls.k8s: {
|
||||
'port': 443,
|
||||
'port_from_addr': True,
|
||||
'required': True,
|
||||
'secret_types': ['token'],
|
||||
},
|
||||
cls.http: {
|
||||
'port': 80,
|
||||
'port_from_addr': True,
|
||||
'secret_types': ['password'],
|
||||
'setting': {
|
||||
'username_selector': 'name=username',
|
||||
'password_selector': 'name=password',
|
||||
'submit_selector': 'id=login_button',
|
||||
'autofill': {
|
||||
'label': _('Autofill'),
|
||||
'type': 'choice',
|
||||
'choices': FillType.choices,
|
||||
'default': 'basic',
|
||||
},
|
||||
'username_selector': {
|
||||
'type': 'str',
|
||||
'default': 'name=username',
|
||||
'label': _('Username selector')
|
||||
},
|
||||
'password_selector': {
|
||||
'type': 'str',
|
||||
'default': 'name=password',
|
||||
'label': _('Password selector')
|
||||
},
|
||||
'submit_selector': {
|
||||
'type': 'str',
|
||||
'default': 'type=submit',
|
||||
'label': _('Submit selector')
|
||||
},
|
||||
'script': {
|
||||
'type': 'text',
|
||||
'default': [],
|
||||
'label': _('Script'),
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
    @classmethod
    def gpt_protocols(cls):
        protocols = {
            cls.chatgpt: {
                'port': 443,
                'required': True,
                'port_from_addr': True,
                'secret_types': ['api_key'],
                'setting': {
                    'api_mode': {
                        'type': 'choice',
                        'default': 'gpt-3.5-turbo',
                        'label': _('API mode'),
                        'choices': [
                            ('gpt-3.5-turbo', 'GPT-3.5 Turbo'),
                            ('gpt-3.5-turbo-16k', 'GPT-3.5 Turbo 16K'),
                        ]
                    }
                }
            }
        }
        if settings.XPACK_ENABLED:
            choices = protocols[cls.chatgpt]['setting']['api_mode']['choices']
            choices.extend([
                ('gpt-4', 'GPT-4'),
                ('gpt-4-32k', 'GPT-4 32K'),
            ])
        return protocols

    @classmethod
    @cached_method(ttl=600)
    def settings(cls):
        return {
            **cls.device_protocols(),
            **cls.database_protocols(),
            **cls.cloud_protocols()
            **cls.cloud_protocols(),
            **cls.gpt_protocols(),
        }

    @classmethod
    @cached_method(ttl=600)
    def xpack_protocols(cls):
        return [
            protocol
            for protocol, config in cls.settings().items()
            if config.get('xpack', False)
        ]

    @classmethod
    def protocol_secret_types(cls):
        settings = cls.settings()
        configs = cls.settings()
        return {
            protocol: settings[protocol]['secret_types'] or ['password']
            for protocol in cls.settings()
            protocol: configs[protocol]['secret_types'] or ['password']
            for protocol in configs
        }
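A hedged usage sketch: assuming a configured Django environment where assets.const exports Protocol, the merged settings map can be queried per protocol; with XPACK enabled the api_mode choice list shown above is the longer one.

# Illustrative only; requires the JumpServer Django project to be set up.
from assets.const import Protocol

chatgpt_cfg = Protocol.settings().get('chatgpt', {})
print(chatgpt_cfg.get('port'))  # 443
print(list(chatgpt_cfg.get('setting', {}).get('api_mode', {}).get('choices', [])))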
@ -10,6 +10,7 @@ from .cloud import CloudTypes
from .custom import CustomTypes
from .database import DatabaseTypes
from .device import DeviceTypes
from .gpt import GPTTypes
from .host import HostTypes
from .web import WebTypes


@ -18,7 +19,7 @@ class AllTypes(ChoicesMixin):
    choices: list
    includes = [
        HostTypes, DeviceTypes, DatabaseTypes,
        CloudTypes, WebTypes, CustomTypes
        CloudTypes, WebTypes, CustomTypes, GPTTypes
    ]
    _category_constrains = {}


@ -147,6 +148,7 @@ class AllTypes(ChoicesMixin):
        (Category.DATABASE, DatabaseTypes),
        (Category.CLOUD, CloudTypes),
        (Category.WEB, WebTypes),
        (Category.GPT, GPTTypes),
        (Category.CUSTOM, CustomTypes),
    )
@ -193,7 +195,6 @@ class AllTypes(ChoicesMixin):
|
|||
}
|
||||
return node
|
||||
|
||||
|
||||
@classmethod
|
||||
def asset_to_node(cls, asset, pid):
|
||||
node = {
|
||||
|
@ -351,7 +352,7 @@ class AllTypes(ChoicesMixin):
|
|||
|
||||
for d in platform_datas:
|
||||
name = d['name']
|
||||
# print("\t - Platform: {}".format(name))
|
||||
print("\t - Platform: {}".format(name))
|
||||
_automation = d.pop('automation', {})
|
||||
_protocols = d.pop('_protocols', [])
|
||||
_protocols_setting = d.pop('protocols_setting', {})
|
||||
|
@ -364,7 +365,7 @@ class AllTypes(ChoicesMixin):
|
|||
setting = _protocols_setting.get(p['name'], {})
|
||||
p['required'] = setting.pop('required', False)
|
||||
p['default'] = setting.pop('default', False)
|
||||
p['setting'] = {**p.get('setting', {}), **setting}
|
||||
p['setting'] = {**p.get('setting', {}).get('default', ''), **setting}
|
||||
|
||||
platform_data = {
|
||||
**default_platform_data, **d,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from .base import BaseType
|
||||
|
@ -53,9 +52,3 @@ class WebTypes(BaseType):
|
|||
return [
|
||||
cls.WEBSITE,
|
||||
]
|
||||
|
||||
|
||||
class FillType(models.TextChoices):
|
||||
no = 'no', _('Disabled')
|
||||
basic = 'basic', _('Basic')
|
||||
script = 'script', _('Script')
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import django.db
|
||||
from django.db import migrations, models
|
||||
|
||||
import common.db.fields
|
||||
|
||||
|
||||
|
@ -118,7 +119,7 @@ class Migration(migrations.Migration):
|
|||
primary_key=True, serialize=False, to='assets.asset')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Host',
|
||||
'verbose_name': 'Host',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
|
|
|
@ -137,6 +137,25 @@ def migrate_to_nodes(apps, *args):
        parent.save()


def migrate_ori_host_to_devices(apps, *args):
    device_model = apps.get_model('assets', 'Device')
    asset_model = apps.get_model('assets', 'Asset')
    host_model = apps.get_model('assets', 'Host')
    hosts_need_migrate_to_device = host_model.objects.filter(asset_ptr__platform__category='device')
    assets = asset_model.objects.filter(id__in=hosts_need_migrate_to_device.values_list('asset_ptr_id', flat=True))
    assets_map = {asset.id: asset for asset in assets}

    print("\t- Migrate ori host to device: ", len(hosts_need_migrate_to_device))
    for host in hosts_need_migrate_to_device:
        asset = assets_map.get(host.asset_ptr_id)
        if not asset:
            continue
        device = device_model(asset_ptr_id=asset.id)
        device.__dict__.update(asset.__dict__)
        device.save()
        host.delete(keep_parents=True)


class Migration(migrations.Migration):
    dependencies = [
        ('assets', '0097_auto_20220426_1558'),

@ -146,5 +165,6 @@ class Migration(migrations.Migration):
    operations = [
        migrations.RunPython(migrate_database_to_asset),
        migrations.RunPython(migrate_cloud_to_asset),
        migrations.RunPython(migrate_to_nodes)
        migrations.RunPython(migrate_to_nodes),
        migrations.RunPython(migrate_ori_host_to_devices),
    ]
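The data migration above relies on Django's multi-table inheritance: a new Device child row is attached to the existing Asset parent, the parent's field values are copied over, and the Host child row is removed while keep_parents=True keeps the Asset intact. A generic sketch of that idiom, with placeholder names rather than project models:

# Generic MTI move sketch; OldChild/NewChild/parent_ptr are placeholders, not project models.
def move_child(old_child, new_child_model):
    parent = old_child.parent_ptr                     # the shared parent row
    new_child = new_child_model(parent_ptr_id=parent.id)
    new_child.__dict__.update(parent.__dict__)        # reuse the parent's field values
    new_child.save()
    old_child.delete(keep_parents=True)               # drop the old child row, keep the parent
    return new_child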
@ -2,16 +2,13 @@
|
|||
|
||||
from django.db import migrations, models
|
||||
|
||||
from assets.const import AllTypes
|
||||
|
||||
|
||||
def migrate_automation_push_account_params(apps, schema_editor):
|
||||
platform_automation_model = apps.get_model('assets', 'PlatformAutomation')
|
||||
platform_automation_methods = AllTypes.get_automation_methods()
|
||||
methods_id_data_map = {
|
||||
i['id']: None if i['params_serializer'] is None else i['params_serializer']({}).data
|
||||
for i in platform_automation_methods
|
||||
if i['method'] == 'push_account'
|
||||
'push_account_aix': {'sudo': '/bin/whoami', 'shell': '/bin/bash', 'home': '', 'groups': ''},
|
||||
'push_account_posix': {'sudo': '/bin/whoami', 'shell': '/bin/bash', 'home': '', 'groups': ''},
|
||||
'push_account_local_windows': {'groups': 'Users,Remote Desktop Users'},
|
||||
}
|
||||
automation_objs = []
|
||||
for automation in platform_automation_model.objects.all():
|
||||
|
|
|
@ -0,0 +1,39 @@
# Generated by Django 3.2.19 on 2023-06-30 08:13

import django.db.models.deletion
from django.db import migrations, models


def add_chatgpt_platform(apps, schema_editor):
    platform_cls = apps.get_model('assets', 'Platform')
    automation_cls = apps.get_model('assets', 'PlatformAutomation')
    platform = platform_cls.objects.create(
        name='ChatGPT', internal=True, category='gpt', type='chatgpt',
        domain_enabled=False, su_enabled=False, comment='ChatGPT',
        created_by='System', updated_by='System',
    )
    platform.protocols.create(name='chatgpt', port=443, primary=True, setting={'api_mode': 'gpt-3.5-turbo'})
    automation_cls.objects.create(ansible_enabled=False, platform=platform)


class Migration(migrations.Migration):
    dependencies = [
        ('assets', '0119_assets_add_default_node'),
    ]

    operations = [
        migrations.CreateModel(
            name='GPT',
            fields=[
                ('asset_ptr',
                 models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
                                      primary_key=True, serialize=False, to='assets.asset')),
                ('proxy', models.CharField(blank=True, default='', max_length=128, verbose_name='Proxy')),
            ],
            options={
                'verbose_name': 'Web',
            },
            bases=('assets.asset',),
        ),
        migrations.RunPython(add_chatgpt_platform)
    ]
@ -3,5 +3,6 @@ from .common import *
|
|||
from .custom import *
|
||||
from .database import *
|
||||
from .device import *
|
||||
from .gpt import *
|
||||
from .host import *
|
||||
from .web import *
|
||||
|
|
|
@ -206,15 +206,14 @@ class Asset(NodesRelationMixin, AbsConnectivity, JSONFilterMixin, JMSOrgBaseMode
    @lazyproperty
    def auto_config(self):
        platform = self.platform
        automation = self.platform.automation
        auto_config = {
            'su_enabled': platform.su_enabled,
            'domain_enabled': platform.domain_enabled,
            'ansible_enabled': False
        }
        automation = getattr(self.platform, 'automation', None)
        if not automation:
            return auto_config

        auto_config.update(model_to_dict(automation))
        return auto_config
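The switch to getattr matters because a missing reverse one-to-one relation raises RelatedObjectDoesNotExist (an AttributeError subclass) rather than returning None, and getattr with a default absorbs that. A short sketch of the guarded access, assuming platform.automation is such a reverse one-to-one:

# Illustrative helper; `platform` is any Platform instance.
def platform_ansible_enabled(platform):
    automation = getattr(platform, 'automation', None)   # None when no PlatformAutomation row exists
    return bool(automation and getattr(automation, 'ansible_enabled', False))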
@ -0,0 +1,11 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from .common import Asset


class GPT(Asset):
    proxy = models.CharField(max_length=128, blank=True, default='', verbose_name=_("Proxy"))

    class Meta:
        verbose_name = _("Web")
@ -1,7 +1,7 @@
|
|||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from assets.const.web import FillType
|
||||
from assets.const import FillType
|
||||
from .common import Asset
|
||||
|
||||
|
||||
|
|
|
@ -7,12 +7,9 @@ from __future__ import unicode_literals
|
|||
import uuid
|
||||
|
||||
from django.db import models
|
||||
import logging
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
__all__ = ['AssetGroup']
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AssetGroup(models.Model):
|
||||
|
|
|
@ -429,18 +429,6 @@ class NodeAssetsMixin(NodeAllAssetsMappingMixin):
|
|||
assets = Asset.objects.filter(nodes=self)
|
||||
return assets.distinct()
|
||||
|
||||
def get_assets_for_tree(self):
|
||||
return self.get_assets().only(
|
||||
"id", "name", "address", "platform_id",
|
||||
"org_id", "is_active"
|
||||
).prefetch_related('platform')
|
||||
|
||||
def get_all_assets_for_tree(self):
|
||||
return self.get_all_assets().only(
|
||||
"id", "name", "address", "platform_id",
|
||||
"org_id", "is_active"
|
||||
).prefetch_related('platform')
|
||||
|
||||
def get_valid_assets(self):
|
||||
return self.get_assets().valid()
|
||||
|
||||
|
|
|
@ -8,6 +8,8 @@ from common.db.models import JMSBaseModel
|
|||
|
||||
__all__ = ['Platform', 'PlatformProtocol', 'PlatformAutomation']
|
||||
|
||||
from common.utils import lazyproperty
|
||||
|
||||
|
||||
class PlatformProtocol(models.Model):
|
||||
name = models.CharField(max_length=32, verbose_name=_('Name'))
|
||||
|
@ -26,6 +28,11 @@ class PlatformProtocol(models.Model):
|
|||
def secret_types(self):
|
||||
return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])
|
||||
|
||||
@lazyproperty
|
||||
def port_from_addr(self):
|
||||
from assets.const.protocol import Protocol as ProtocolConst
|
||||
return ProtocolConst.settings().get(self.name, {}).get('port_from_addr', False)
|
||||
|
||||
|
||||
class PlatformAutomation(models.Model):
|
||||
ansible_enabled = models.BooleanField(default=False, verbose_name=_("Enabled"))
|
||||
|
|
|
@ -4,5 +4,6 @@ from .common import *
|
|||
from .custom import *
|
||||
from .database import *
|
||||
from .device import *
|
||||
from .gpt import *
|
||||
from .host import *
|
||||
from .web import *
|
||||
|
|
|
@ -124,6 +124,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
|
||||
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Account'))
|
||||
nodes_display = serializers.ListField(read_only=False, required=False, label=_("Node path"))
|
||||
_accounts = None
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
|
@ -151,6 +152,13 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._init_field_choices()
|
||||
self._extract_accounts()
|
||||
|
||||
def _extract_accounts(self):
|
||||
if not getattr(self, 'initial_data', None):
|
||||
return
|
||||
accounts = self.initial_data.pop('accounts', None)
|
||||
self._accounts = accounts
|
||||
|
||||
def _get_protocols_required_default(self):
|
||||
platform = self._asset_platform
|
||||
|
@ -167,10 +175,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
return
|
||||
|
||||
protocols_required, protocols_default = self._get_protocols_required_default()
|
||||
protocols_data = [
|
||||
{'name': p.name, 'port': p.port}
|
||||
for p in protocols_required + protocols_default
|
||||
]
|
||||
protocol_map = {str(protocol.id): protocol for protocol in protocols_required + protocols_default}
|
||||
protocols = list(protocol_map.values())
|
||||
protocols_data = [{'name': p.name, 'port': p.port} for p in protocols]
|
||||
self.initial_data['protocols'] = protocols_data
|
||||
|
||||
def _init_field_choices(self):
|
||||
|
@ -263,7 +270,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
error = p.get('name') + ': ' + _("port out of range (0-65535)")
|
||||
raise serializers.ValidationError(error)
|
||||
|
||||
protocols_required, protocols_default = self._get_protocols_required_default()
|
||||
protocols_required, __ = self._get_protocols_required_default()
|
||||
protocols_not_found = [p.name for p in protocols_required if p.name not in protocols_data_map]
|
||||
if protocols_not_found:
|
||||
raise serializers.ValidationError({
|
||||
|
@ -277,7 +284,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
return
|
||||
for data in accounts_data:
|
||||
data['asset'] = asset.id
|
||||
|
||||
s = AssetAccountSerializer(data=accounts_data, many=True)
|
||||
s.is_valid(raise_exception=True)
|
||||
s.save()
|
||||
|
@ -285,16 +291,13 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
@atomic
|
||||
def create(self, validated_data):
|
||||
nodes_display = validated_data.pop('nodes_display', '')
|
||||
accounts = validated_data.pop('accounts', [])
|
||||
instance = super().create(validated_data)
|
||||
self.accounts_create(accounts, instance)
|
||||
self.accounts_create(self._accounts, instance)
|
||||
self.perform_nodes_display_create(instance, nodes_display)
|
||||
return instance
|
||||
|
||||
@atomic
|
||||
def update(self, instance, validated_data):
|
||||
if not validated_data.get('accounts'):
|
||||
validated_data.pop('accounts', None)
|
||||
nodes_display = validated_data.pop('nodes_display', '')
|
||||
instance = super().update(instance, validated_data)
|
||||
self.perform_nodes_display_create(instance, nodes_display)
|
||||
|
|
|
@ -0,0 +1,33 @@
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers

from assets.models import GPT
from .common import AssetSerializer

__all__ = ['GPTSerializer']


class GPTSerializer(AssetSerializer):
    class Meta(AssetSerializer.Meta):
        model = GPT
        fields = AssetSerializer.Meta.fields + [
            'proxy',
        ]
        extra_kwargs = {
            **AssetSerializer.Meta.extra_kwargs,
            'proxy': {
                'help_text': _(
                    'If the server cannot directly connect to the API address, '
                    'you need to set up an HTTP proxy. '
                    'e.g. http(s)://host:port'
                ),
                'label': _('HTTP proxy')}
        }

    @staticmethod
    def validate_proxy(value):
        if value and not value.startswith(("http://", "https://")):
            raise serializers.ValidationError(
                _('Proxy must start with http:// or https://')
            )
        return value
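A standalone sketch of the same proxy check; the real validator raises rest_framework's ValidationError, while a plain ValueError keeps this example dependency-free.

def validate_proxy(value: str) -> str:
    # Same rule as GPTSerializer.validate_proxy: only http/https proxies are accepted
    if value and not value.startswith(("http://", "https://")):
        raise ValueError('Proxy must start with http:// or https://')
    return value

print(validate_proxy('http://proxy.internal:3128'))   # accepted and returned unchanged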
@ -1,7 +1,7 @@
|
|||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from assets.const.web import FillType
|
||||
from assets.const import FillType
|
||||
from assets.models import Database, Web
|
||||
from common.serializers.fields import LabeledChoiceField
|
||||
|
||||
|
@ -14,6 +14,7 @@ class DatabaseSpecSerializer(serializers.ModelSerializer):
|
|||
|
||||
class WebSpecSerializer(serializers.ModelSerializer):
|
||||
autofill = LabeledChoiceField(choices=FillType.choices, label=_('Autofill'))
|
||||
|
||||
class Meta:
|
||||
model = Web
|
||||
fields = [
|
||||
|
|
|
@ -51,14 +51,14 @@ class AutomationExecutionSerializer(serializers.ModelSerializer):
|
|||
from assets.const import AutomationTypes as AssetTypes
|
||||
from accounts.const import AutomationTypes as AccountTypes
|
||||
tp_dict = dict(AssetTypes.choices) | dict(AccountTypes.choices)
|
||||
tp = obj.snapshot['type']
|
||||
tp = obj.snapshot.get('type', '')
|
||||
snapshot = {
|
||||
'type': {'value': tp, 'label': tp_dict.get(tp, tp)},
|
||||
'name': obj.snapshot['name'],
|
||||
'comment': obj.snapshot['comment'],
|
||||
'accounts': obj.snapshot['accounts'],
|
||||
'node_amount': len(obj.snapshot['nodes']),
|
||||
'asset_amount': len(obj.snapshot['assets']),
|
||||
'name': obj.snapshot.get('name'),
|
||||
'comment': obj.snapshot.get('comment'),
|
||||
'accounts': obj.snapshot.get('accounts'),
|
||||
'node_amount': len(obj.snapshot.get('nodes', [])),
|
||||
'asset_amount': len(obj.snapshot.get('assets', [])),
|
||||
}
|
||||
return snapshot
|
||||
|
||||
|
|
|
@ -1,48 +1,17 @@
|
|||
from django.db.models import QuerySet
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from assets.const.web import FillType
|
||||
from common.serializers import WritableNestedModelSerializer, type_field_map
|
||||
from common.serializers import (
|
||||
WritableNestedModelSerializer, type_field_map, MethodSerializer,
|
||||
DictSerializer, create_serializer_class
|
||||
)
|
||||
from common.serializers.fields import LabeledChoiceField
|
||||
from common.utils import lazyproperty
|
||||
from ..const import Category, AllTypes
|
||||
from ..const import Category, AllTypes, Protocol
|
||||
from ..models import Platform, PlatformProtocol, PlatformAutomation
|
||||
|
||||
__all__ = ["PlatformSerializer", "PlatformOpsMethodSerializer"]
|
||||
|
||||
|
||||
class ProtocolSettingSerializer(serializers.Serializer):
|
||||
SECURITY_CHOICES = [
|
||||
("any", "Any"),
|
||||
("rdp", "RDP"),
|
||||
("tls", "TLS"),
|
||||
("nla", "NLA"),
|
||||
]
|
||||
# RDP
|
||||
console = serializers.BooleanField(required=False, default=False)
|
||||
security = serializers.ChoiceField(choices=SECURITY_CHOICES, default="any")
|
||||
|
||||
# SFTP
|
||||
sftp_enabled = serializers.BooleanField(default=True, label=_("SFTP enabled"))
|
||||
sftp_home = serializers.CharField(default="/tmp", label=_("SFTP home"))
|
||||
|
||||
# HTTP
|
||||
autofill = serializers.ChoiceField(default='basic', choices=FillType.choices, label=_("Autofill"))
|
||||
username_selector = serializers.CharField(
|
||||
default="", allow_blank=True, label=_("Username selector")
|
||||
)
|
||||
password_selector = serializers.CharField(
|
||||
default="", allow_blank=True, label=_("Password selector")
|
||||
)
|
||||
submit_selector = serializers.CharField(
|
||||
default="", allow_blank=True, label=_("Submit selector")
|
||||
)
|
||||
script = serializers.JSONField(default=list, label=_("Script"))
|
||||
# Redis
|
||||
auth_username = serializers.BooleanField(default=False, label=_("Auth with username"))
|
||||
|
||||
# WinRM
|
||||
use_ssl = serializers.BooleanField(default=False, label=_("Use SSL"))
|
||||
__all__ = ["PlatformSerializer", "PlatformOpsMethodSerializer", "PlatformProtocolSerializer"]
|
||||
|
||||
|
||||
class PlatformAutomationSerializer(serializers.ModelSerializer):
|
||||
|
@ -76,15 +45,57 @@ class PlatformAutomationSerializer(serializers.ModelSerializer):
|
|||
|
||||
|
||||
class PlatformProtocolSerializer(serializers.ModelSerializer):
|
||||
setting = ProtocolSettingSerializer(required=False, allow_null=True)
|
||||
setting = MethodSerializer(required=False, label=_("Setting"))
|
||||
port_from_addr = serializers.BooleanField(label=_("Port from addr"), read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = PlatformProtocol
|
||||
fields = [
|
||||
"id", "name", "port", "primary",
|
||||
"required", "default", "public",
|
||||
"id", "name", "port", "port_from_addr",
|
||||
"primary", "required", "default", "public",
|
||||
"secret_types", "setting",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"primary": {
|
||||
"help_text": _(
|
||||
"This protocol is primary, and it must be set when adding assets. "
|
||||
"Additionally, there can only be one primary protocol."
|
||||
)
|
||||
},
|
||||
"required": {
|
||||
"help_text": _("This protocol is required, and it must be set when adding assets.")
|
||||
},
|
||||
"default": {
|
||||
"help_text": _("This protocol is default, when adding assets, it will be displayed by default.")
|
||||
},
|
||||
"public": {
|
||||
"help_text": _("This protocol is public, asset will show this protocol to user")
|
||||
},
|
||||
}
|
||||
|
||||
def get_setting_serializer(self):
|
||||
request = self.context.get('request')
|
||||
default_field = DictSerializer(required=False)
|
||||
|
||||
if not request:
|
||||
return default_field
|
||||
|
||||
if self.instance and isinstance(self.instance, (QuerySet, list)):
|
||||
instance = self.instance[0]
|
||||
else:
|
||||
instance = self.instance
|
||||
|
||||
protocol = request.query_params.get('name', '')
|
||||
if instance and not protocol:
|
||||
protocol = instance.name
|
||||
|
||||
protocol_settings = Protocol.settings()
|
||||
setting_fields = protocol_settings.get(protocol, {}).get('setting')
|
||||
if not setting_fields:
|
||||
return default_field
|
||||
setting_fields = [{'name': k, **v} for k, v in setting_fields.items()]
|
||||
name = '{}ProtocolSettingSerializer'.format(protocol.capitalize())
|
||||
return create_serializer_class(name, setting_fields)()
|
||||
|
||||
def to_file_representation(self, data):
|
||||
return '{name}/{port}'.format(**data)
|
||||
|
@ -144,6 +155,18 @@ class PlatformSerializer(WritableNestedModelSerializer):
|
|||
"domain_default": {"label": _('Default Domain')},
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.set_initial_value()
|
||||
|
||||
def set_initial_value(self):
|
||||
if not hasattr(self, 'initial_data'):
|
||||
return
|
||||
if self.instance:
|
||||
return
|
||||
if not self.initial_data.get('automation'):
|
||||
self.initial_data['automation'] = {}
|
||||
|
||||
@property
|
||||
def platform_category_type(self):
|
||||
if self.instance:
|
||||
|
@ -189,8 +212,9 @@ class PlatformSerializer(WritableNestedModelSerializer):
|
|||
|
||||
def validate_automation(self, automation):
|
||||
automation = automation or {}
|
||||
automation = automation.get('ansible_enabled', False) \
|
||||
and self.constraints['automation'].get('ansible_enabled', False)
|
||||
ansible_enabled = automation.get('ansible_enabled', False) \
|
||||
and self.constraints['automation'].get('ansible_enabled', False)
|
||||
automation['ansible_enable'] = ansible_enabled
|
||||
return automation
|
||||
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@ router.register(r'devices', api.DeviceViewSet, 'device')
|
|||
router.register(r'databases', api.DatabaseViewSet, 'database')
|
||||
router.register(r'webs', api.WebViewSet, 'web')
|
||||
router.register(r'clouds', api.CloudViewSet, 'cloud')
|
||||
router.register(r'gpts', api.GPTViewSet, 'gpt')
|
||||
router.register(r'customs', api.CustomViewSet, 'custom')
|
||||
router.register(r'platforms', api.AssetPlatformViewSet, 'platform')
|
||||
router.register(r'labels', api.LabelViewSet, 'label')
|
||||
|
@ -21,6 +22,7 @@ router.register(r'nodes', api.NodeViewSet, 'node')
|
|||
router.register(r'domains', api.DomainViewSet, 'domain')
|
||||
router.register(r'gateways', api.GatewayViewSet, 'gateway')
|
||||
router.register(r'favorite-assets', api.FavoriteAssetViewSet, 'favorite-asset')
|
||||
router.register(r'protocol-settings', api.PlatformProtocolViewSet, 'protocol-setting')
|
||||
|
||||
urlpatterns = [
|
||||
# path('assets/<uuid:pk>/gateways/', api.AssetGatewayListApi.as_view(), name='asset-gateway-list'),
|
||||
|
@ -46,7 +48,8 @@ urlpatterns = [
|
|||
path('nodes/<uuid:pk>/tasks/', api.NodeTaskCreateApi.as_view(), name='node-task-create'),
|
||||
|
||||
path('gateways/<uuid:pk>/test-connective/', api.GatewayTestConnectionApi.as_view(), name='test-gateway-connective'),
|
||||
path('platform-automation-methods/', api.PlatformAutomationMethodsApi.as_view(), name='platform-automation-methods'),
|
||||
path('platform-automation-methods/', api.PlatformAutomationMethodsApi.as_view(),
|
||||
name='platform-automation-methods'),
|
||||
]
|
||||
|
||||
urlpatterns += router.urls
|
||||
|
|
|
@ -42,7 +42,7 @@ def _get_instance_field_value(
|
|||
if getattr(f, 'attname', None) in model_need_continue_fields:
|
||||
continue
|
||||
|
||||
value = getattr(instance, f.name) or getattr(instance, f.attname)
|
||||
value = getattr(instance, f.name, None) or getattr(instance, f.attname, None)
|
||||
if not isinstance(value, bool) and not value:
|
||||
continue
|
||||
|
||||
|
|
|
@ -8,12 +8,13 @@ from django.http import HttpResponse
|
|||
from django.shortcuts import get_object_or_404
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import status
|
||||
from rest_framework import status, serializers
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.exceptions import PermissionDenied, ValidationError
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
|
||||
from accounts.const import AliasAccount
|
||||
from common.api import JMSModelViewSet
|
||||
from common.exceptions import JMSException
|
||||
from common.utils import random_string, get_logger, get_request_ip
|
||||
|
@ -22,12 +23,12 @@ from common.utils.http import is_true, is_false
|
|||
from orgs.mixins.api import RootOrgViewMixin
|
||||
from perms.models import ActionChoices
|
||||
from terminal.connect_methods import NativeClient, ConnectMethodUtil
|
||||
from terminal.models import EndpointRule
|
||||
from terminal.models import EndpointRule, Endpoint
|
||||
from ..models import ConnectionToken, date_expired_default
|
||||
from ..serializers import (
|
||||
ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
|
||||
SuperConnectionTokenSerializer, ConnectTokenAppletOptionSerializer,
|
||||
ConnectionTokenUpdateSerializer
|
||||
ConnectionTokenReusableSerializer,
|
||||
)
|
||||
|
||||
__all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']
|
||||
|
@ -165,11 +166,13 @@ class RDPFileClientProtocolURLMixin:
|
|||
return data
|
||||
|
||||
def get_smart_endpoint(self, protocol, asset=None):
|
||||
target_ip = asset.get_target_ip() if asset else ''
|
||||
endpoint = EndpointRule.match_endpoint(
|
||||
target_instance=asset, target_ip=target_ip,
|
||||
protocol=protocol, request=self.request
|
||||
)
|
||||
endpoint = Endpoint.match_by_instance_label(asset, protocol)
|
||||
if not endpoint:
|
||||
target_ip = asset.get_target_ip() if asset else ''
|
||||
endpoint = EndpointRule.match_endpoint(
|
||||
target_instance=asset, target_ip=target_ip,
|
||||
protocol=protocol, request=self.request
|
||||
)
|
||||
return endpoint
|
||||
|
||||
|
||||
|
@ -211,6 +214,18 @@ class ExtraActionApiMixin(RDPFileClientProtocolURLMixin):
|
|||
instance.expire()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@action(methods=['PATCH'], detail=True, url_path='reuse')
|
||||
def reuse(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
if not settings.CONNECTION_TOKEN_REUSABLE:
|
||||
error = _('Reusable connection token is not allowed, global setting not enabled')
|
||||
raise serializers.ValidationError(error)
|
||||
serializer = self.get_serializer(instance, data=request.data, partial=True)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
is_reusable = serializer.validated_data.get('is_reusable', False)
|
||||
instance.set_reusable(is_reusable)
|
||||
return Response(data=serializer.data)
|
||||
|
||||
@action(methods=['POST'], detail=False)
|
||||
def exchange(self, request, *args, **kwargs):
|
||||
pk = request.data.get('id', None) or request.data.get('pk', None)
|
||||
|
@ -231,17 +246,16 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
|
|||
search_fields = filterset_fields
|
||||
serializer_classes = {
|
||||
'default': ConnectionTokenSerializer,
|
||||
'update': ConnectionTokenUpdateSerializer,
|
||||
'partial_update': ConnectionTokenUpdateSerializer,
|
||||
'reuse': ConnectionTokenReusableSerializer,
|
||||
}
|
||||
http_method_names = ['get', 'post', 'patch', 'head', 'options', 'trace']
|
||||
rbac_perms = {
|
||||
'list': 'authentication.view_connectiontoken',
|
||||
'retrieve': 'authentication.view_connectiontoken',
|
||||
'update': 'authentication.change_connectiontoken',
|
||||
'create': 'authentication.add_connectiontoken',
|
||||
'exchange': 'authentication.add_connectiontoken',
|
||||
'expire': 'authentication.change_connectiontoken',
|
||||
'reuse': 'authentication.reuse_connectiontoken',
|
||||
'expire': 'authentication.expire_connectiontoken',
|
||||
'get_rdp_file': 'authentication.add_connectiontoken',
|
||||
'get_client_protocol_url': 'authentication.add_connectiontoken',
|
||||
}
|
||||
|
@ -282,13 +296,17 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
|
|||
data['org_id'] = asset.org_id
|
||||
data['user'] = user
|
||||
data['value'] = random_string(16)
|
||||
|
||||
if account_name == AliasAccount.ANON and asset.category not in ['web', 'custom']:
|
||||
raise ValidationError(_('Anonymous account is not supported for this asset'))
|
||||
|
||||
account = self._validate_perm(user, asset, account_name)
|
||||
if account.has_secret:
|
||||
data['input_secret'] = ''
|
||||
|
||||
if account.username != '@INPUT':
|
||||
if account.username != AliasAccount.INPUT:
|
||||
data['input_username'] = ''
|
||||
if account.username == '@USER':
|
||||
elif account.username == AliasAccount.USER:
|
||||
data['input_username'] = user.username
|
||||
|
||||
ticket = self._validate_acl(user, asset, account)
|
||||
|
@ -341,7 +359,7 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
|||
rbac_perms = {
|
||||
'create': 'authentication.add_superconnectiontoken',
|
||||
'renewal': 'authentication.add_superconnectiontoken',
|
||||
'get_secret_detail': 'authentication.view_connectiontokensecret',
|
||||
'get_secret_detail': 'authentication.view_superconnectiontokensecret',
|
||||
'get_applet_info': 'authentication.view_superconnectiontoken',
|
||||
'release_applet_account': 'authentication.view_superconnectiontoken',
|
||||
}
|
||||
|
@ -371,7 +389,7 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
|||
@action(methods=['POST'], detail=False, url_path='secret')
|
||||
def get_secret_detail(self, request, *args, **kwargs):
|
||||
""" 非常重要的 api, 在逻辑层再判断一下 rbac 权限, 双重保险 """
|
||||
rbac_perm = 'authentication.view_connectiontokensecret'
|
||||
rbac_perm = 'authentication.view_superconnectiontokensecret'
|
||||
if not request.user.has_perm(rbac_perm):
|
||||
raise PermissionDenied('Not allow to view secret')
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from django.http import HttpResponseRedirect
|
||||
from rest_framework.generics import CreateAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import AllowAny
|
||||
|
@ -41,7 +42,7 @@ class UserResetPasswordSendCodeApi(CreateAPIView):
|
|||
token = request.GET.get('token')
|
||||
userinfo = cache.get(token)
|
||||
if not userinfo:
|
||||
return reverse('authentication:forgot-previewing')
|
||||
return HttpResponseRedirect(reverse('authentication:forgot-previewing'))
|
||||
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
|
|
@ -9,6 +9,7 @@ from django_auth_ldap.config import _LDAPConfig, LDAPSearch, LDAPSearchUnion
|
|||
|
||||
from users.utils import construct_user_email
|
||||
from common.const import LDAP_AD_ACCOUNT_DISABLE
|
||||
from common.utils.http import is_true
|
||||
from .base import JMSBaseAuthBackend
|
||||
|
||||
logger = _LDAPConfig.get_logger()
|
||||
|
@ -162,10 +163,11 @@ class LDAPUser(_LDAPUser):
|
|||
try:
|
||||
value = self.attrs[attr][0]
|
||||
value = value.strip()
|
||||
if attr.lower() == 'useraccountcontrol' \
|
||||
and field == 'is_active' and value:
|
||||
value = int(value) & LDAP_AD_ACCOUNT_DISABLE \
|
||||
!= LDAP_AD_ACCOUNT_DISABLE
|
||||
if field == 'is_active':
|
||||
if attr.lower() == 'useraccountcontrol' and value:
|
||||
value = int(value) & LDAP_AD_ACCOUNT_DISABLE != LDAP_AD_ACCOUNT_DISABLE
|
||||
else:
|
||||
value = is_true(value)
|
||||
except LookupError:
|
||||
logger.warning("{} does not have a value for the attribute {}".format(self.dn, attr))
|
||||
else:
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
# Generated by Django 3.2.19 on 2023-07-13 06:59
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('authentication', '0020_connectiontoken_connect_options'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='connectiontoken',
|
||||
options={'ordering': ('-date_expired',),
|
||||
'permissions': [('expire_connectiontoken', 'Can expire connection token'),
|
||||
('reuse_connectiontoken', 'Can reuse connection token')],
|
||||
'verbose_name': 'Connection token'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='superconnectiontoken',
|
||||
options={'permissions': [('view_superconnectiontokensecret', 'Can view super connection token secret')],
|
||||
'verbose_name': 'Super connection token'},
|
||||
),
|
||||
]
|
|
@ -9,6 +9,7 @@ from django.utils import timezone
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from accounts.const import AliasAccount
|
||||
from assets.const import Protocol
|
||||
from assets.const.host import GATEWAY_NAME
|
||||
from common.db.fields import EncryptTextField
|
||||
|
@ -21,7 +22,7 @@ from terminal.models import Applet
|
|||
|
||||
|
||||
def date_expired_default():
|
||||
return timezone.now() + timedelta(seconds=settings.CONNECTION_TOKEN_EXPIRATION)
|
||||
return timezone.now() + timedelta(seconds=settings.CONNECTION_TOKEN_ONETIME_EXPIRATION)
|
||||
|
||||
|
||||
class ConnectionToken(JMSOrgBaseModel):
|
||||
|
@ -53,10 +54,11 @@ class ConnectionToken(JMSOrgBaseModel):
|
|||
|
||||
class Meta:
|
||||
ordering = ('-date_expired',)
|
||||
verbose_name = _('Connection token')
|
||||
permissions = [
|
||||
('view_connectiontokensecret', _('Can view connection token secret'))
|
||||
('expire_connectiontoken', _('Can expire connection token')),
|
||||
('reuse_connectiontoken', _('Can reuse connection token')),
|
||||
]
|
||||
verbose_name = _('Connection token')
|
||||
|
||||
@property
|
||||
def is_expired(self):
|
||||
|
@ -79,6 +81,15 @@ class ConnectionToken(JMSOrgBaseModel):
        self.date_expired = timezone.now()
        self.save(update_fields=['date_expired'])

    def set_reusable(self, is_reusable):
        self.is_reusable = is_reusable
        if self.is_reusable:
            seconds = settings.CONNECTION_TOKEN_REUSABLE_EXPIRATION
        else:
            seconds = settings.CONNECTION_TOKEN_ONETIME_EXPIRATION
        self.date_expired = timezone.now() + timedelta(seconds=seconds)
        self.save(update_fields=['is_reusable', 'date_expired'])

    def renewal(self):
        """ Renew the token. Once user-defined tokens are supported, this renewal policy will need to change. """
        self.date_expired = date_expired_default()
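How the two expiration windows interact, as a standalone sketch; the setting values below are placeholders, not the project defaults.

from datetime import datetime, timedelta

CONNECTION_TOKEN_ONETIME_EXPIRATION = 5 * 60            # assumed: seconds for one-time tokens
CONNECTION_TOKEN_REUSABLE_EXPIRATION = 30 * 24 * 3600   # assumed: seconds for reusable tokens

def new_expiry(is_reusable: bool) -> datetime:
    seconds = CONNECTION_TOKEN_REUSABLE_EXPIRATION if is_reusable else CONNECTION_TOKEN_ONETIME_EXPIRATION
    return datetime.now() + timedelta(seconds=seconds)

print(new_expiry(True) > new_expiry(False))  # True: reusable tokens get the longer window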
@ -175,7 +186,7 @@ class ConnectionToken(JMSOrgBaseModel):
|
|||
if not applet:
|
||||
return None
|
||||
|
||||
host_account = applet.select_host_account(self.user)
|
||||
host_account = applet.select_host_account(self.user, self.asset)
|
||||
if not host_account:
|
||||
raise JMSException({'error': 'No host account available'})
|
||||
|
||||
|
@ -209,29 +220,19 @@ class ConnectionToken(JMSOrgBaseModel):
|
|||
if not self.asset:
|
||||
return None
|
||||
|
||||
account = self.asset.accounts.filter(name=self.account).first()
|
||||
if self.account == '@INPUT' or not account:
|
||||
data = {
|
||||
'name': self.account,
|
||||
'username': self.input_username,
|
||||
'secret_type': 'password',
|
||||
'secret': self.input_secret,
|
||||
'su_from': None,
|
||||
'org_id': self.asset.org_id,
|
||||
'asset': self.asset
|
||||
}
|
||||
if self.account.startswith('@'):
|
||||
account = Account.get_special_account(self.account)
|
||||
account.asset = self.asset
|
||||
account.org_id = self.asset.org_id
|
||||
|
||||
if self.account in [AliasAccount.INPUT, AliasAccount.USER]:
|
||||
account.username = self.input_username
|
||||
account.secret = self.input_secret
|
||||
else:
|
||||
data = {
|
||||
'name': account.name,
|
||||
'username': account.username,
|
||||
'secret_type': account.secret_type,
|
||||
'secret': account.secret or self.input_secret,
|
||||
'su_from': account.su_from,
|
||||
'org_id': account.org_id,
|
||||
'privileged': account.privileged,
|
||||
'asset': self.asset
|
||||
}
|
||||
return Account(**data)
|
||||
account = self.asset.accounts.filter(name=self.account).first()
|
||||
if not account.secret and self.input_secret:
|
||||
account.secret = self.input_secret
|
||||
return account
|
||||
|
||||
@lazyproperty
|
||||
def domain(self):
|
||||
|
@ -264,4 +265,7 @@ class ConnectionToken(JMSOrgBaseModel):
|
|||
class SuperConnectionToken(ConnectionToken):
|
||||
class Meta:
|
||||
proxy = True
|
||||
permissions = [
|
||||
('view_superconnectiontokensecret', _('Can view super connection token secret'))
|
||||
]
|
||||
verbose_name = _("Super connection token")
|
||||
|
|
|
@ -1,20 +1,18 @@
|
|||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.serializers import CommonModelSerializer
|
||||
from common.serializers.fields import EncryptedField
|
||||
from orgs.mixins.serializers import OrgResourceModelSerializerMixin
|
||||
from perms.serializers.permission import ActionChoicesField
|
||||
from ..models import ConnectionToken
|
||||
|
||||
__all__ = [
|
||||
'ConnectionTokenSerializer', 'SuperConnectionTokenSerializer',
|
||||
'ConnectionTokenUpdateSerializer',
|
||||
'ConnectionTokenReusableSerializer',
|
||||
]
|
||||
|
||||
|
||||
class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
|
||||
class ConnectionTokenSerializer(CommonModelSerializer):
|
||||
expire_time = serializers.IntegerField(read_only=True, label=_('Expired time'))
|
||||
input_secret = EncryptedField(
|
||||
label=_("Input secret"), max_length=40960, required=False, allow_blank=True
|
||||
|
@ -60,30 +58,12 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
|
|||
return info
|
||||
|
||||
|
||||
class ConnectionTokenUpdateSerializer(ConnectionTokenSerializer):
|
||||
class Meta(ConnectionTokenSerializer.Meta):
|
||||
class ConnectionTokenReusableSerializer(CommonModelSerializer):
|
||||
class Meta:
|
||||
model = ConnectionToken
|
||||
fields = ['id', 'date_expired', 'is_reusable']
|
||||
can_update_fields = ['is_reusable']
|
||||
read_only_fields = list(set(ConnectionTokenSerializer.Meta.fields) - set(can_update_fields))
|
||||
|
||||
def _get_date_expired(self):
|
||||
delta = self.instance.date_expired - self.instance.date_created
|
||||
if delta.total_seconds() > 3600 * 24:
|
||||
return self.instance.date_expired
|
||||
|
||||
seconds = settings.CONNECTION_TOKEN_EXPIRATION_MAX
|
||||
return timezone.now() + timezone.timedelta(seconds=seconds)
|
||||
|
||||
@staticmethod
|
||||
def validate_is_reusable(value):
|
||||
if value and not settings.CONNECTION_TOKEN_REUSABLE:
|
||||
raise serializers.ValidationError(_('Reusable connection token is not allowed, global setting not enabled'))
|
||||
return value
|
||||
|
||||
def validate(self, attrs):
|
||||
reusable = attrs.get('is_reusable', False)
|
||||
if reusable:
|
||||
attrs['date_expired'] = self._get_date_expired()
|
||||
return attrs
|
||||
read_only_fields = list(set(fields) - set(can_update_fields))
|
||||
|
||||
|
||||
class SuperConnectionTokenSerializer(ConnectionTokenSerializer):
|
||||
|
|
|
@ -2,15 +2,19 @@
|
|||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
import os
|
||||
|
||||
import datetime
|
||||
import os
|
||||
from typing import Callable
|
||||
|
||||
from django.db import IntegrityError
|
||||
from django.templatetags.static import static
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import BACKEND_SESSION_KEY
|
||||
from django.contrib.auth import login as auth_login, logout as auth_logout
|
||||
from django.http import HttpResponse, HttpRequest
|
||||
from django.db import IntegrityError
|
||||
from django.http import HttpRequest
|
||||
from django.shortcuts import reverse, redirect
|
||||
from django.templatetags.static import static
|
||||
from django.urls import reverse_lazy
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.utils.translation import ugettext as _, get_language
|
||||
from django.views.decorators.cache import never_cache
|
||||
|
@ -18,16 +22,13 @@ from django.views.decorators.csrf import csrf_protect
|
|||
from django.views.decorators.debug import sensitive_post_parameters
|
||||
from django.views.generic.base import TemplateView, RedirectView
|
||||
from django.views.generic.edit import FormView
|
||||
from django.conf import settings
|
||||
from django.urls import reverse_lazy
|
||||
from django.contrib.auth import BACKEND_SESSION_KEY
|
||||
|
||||
from common.utils import FlashMessageUtil, static_or_direct
|
||||
from users.utils import (
|
||||
redirect_user_first_login_or_index
|
||||
)
|
||||
from ..const import RSA_PRIVATE_KEY, RSA_PUBLIC_KEY
|
||||
from .. import mixins, errors
|
||||
from ..const import RSA_PRIVATE_KEY, RSA_PUBLIC_KEY
|
||||
from ..forms import get_user_login_form_cls
|
||||
|
||||
__all__ = [
|
||||
|
@ -203,7 +204,9 @@ class UserLoginView(mixins.AuthMixin, UserLoginContextMixin, FormView):
|
|||
|
||||
def form_valid(self, form):
|
||||
if not self.request.session.test_cookie_worked():
|
||||
return HttpResponse(_("Please enable cookies and try again."))
|
||||
form.add_error(None, _("Login timeout, please try again."))
|
||||
return self.form_invalid(form)
|
||||
|
||||
# https://docs.djangoproject.com/en/3.1/topics/http/sessions/#setting-test-cookies
|
||||
self.request.session.delete_test_cookie()
|
||||
|
||||
|
|
|
@ -10,6 +10,8 @@ from common.drf.filters import IDSpmFilter, CustomFilter, IDInFilter
|
|||
|
||||
__all__ = ['ExtraFilterFieldsMixin', 'OrderingFielderFieldsMixin']
|
||||
|
||||
logger = logging.getLogger('jumpserver.common')
|
||||
|
||||
|
||||
class ExtraFilterFieldsMixin:
|
||||
"""
|
||||
|
@ -54,7 +56,9 @@ class OrderingFielderFieldsMixin:
|
|||
try:
|
||||
valid_fields = self.get_valid_ordering_fields()
|
||||
except Exception as e:
|
||||
logging.debug('get_valid_ordering_fields error: %s' % e)
|
||||
logger.debug('get_valid_ordering_fields error: %s' % e)
|
||||
# 这里千万不要这么用,会让 logging 重复,至于为什么,我也不知道
|
||||
# logging.debug('get_valid_ordering_fields error: %s' % e)
|
||||
valid_fields = []
|
||||
|
||||
fields = list(chain(
|
||||
|
|
|
@ -40,7 +40,7 @@ class SignatureAuthentication(authentication.BaseAuthentication):
|
|||
required_headers = ["(request-target)", "date"]
|
||||
|
||||
def fetch_user_data(self, key_id, algorithm=None):
|
||||
"""Retuns a tuple (User, secret) or (None, None)."""
|
||||
"""Returns a tuple (User, secret) or (None, None)."""
|
||||
raise NotImplementedError()
|
||||
|
||||
def authenticate_header(self, request):
|
||||
|
|
|
@ -328,13 +328,13 @@ class RelatedManager:
|
|||
q = Q()
|
||||
if isinstance(val, str):
|
||||
val = [val]
|
||||
if ['*'] in val:
|
||||
return Q()
|
||||
for ip in val:
|
||||
if not ip:
|
||||
continue
|
||||
try:
|
||||
if ip == '*':
|
||||
return Q()
|
||||
elif '/' in ip:
|
||||
if '/' in ip:
|
||||
network = ipaddress.ip_network(ip)
|
||||
ips = network.hosts()
|
||||
q |= Q(**{"{}__in".format(name): ips})
|
||||
|
@ -378,7 +378,7 @@ class RelatedManager:
|
|||
|
||||
if match == 'ip_in':
|
||||
q = cls.get_ip_in_q(name, val)
|
||||
elif match in ("exact", "contains", "startswith", "endswith", "gte", "lte", "gt", "lt"):
|
||||
elif match in ("contains", "startswith", "endswith", "gte", "lte", "gt", "lt"):
|
||||
lookup = "{}__{}".format(name, match)
|
||||
q = Q(**{lookup: val})
|
||||
elif match == 'regex':
|
||||
|
@ -387,7 +387,7 @@ class RelatedManager:
|
|||
lookup = "{}__{}".format(name, match)
|
||||
q = Q(**{lookup: val})
|
||||
except re.error:
|
||||
q = ~Q()
|
||||
q = Q(pk__isnull=True)
|
||||
elif match == "not":
|
||||
q = ~Q(**{name: val})
|
||||
elif match in ['m2m', 'in']:
|
||||
|
@ -459,7 +459,7 @@ class JSONManyToManyDescriptor:
|
|||
|
||||
custom_q = Q()
|
||||
for rule in attr_rules:
|
||||
value = getattr(obj, rule['name'], '')
|
||||
value = getattr(obj, rule['name'], None) or ''
|
||||
rule_value = rule.get('value', '')
|
||||
rule_match = rule.get('match', 'exact')
|
||||
|
||||
|
@ -470,11 +470,11 @@ class JSONManyToManyDescriptor:
|
|||
continue
|
||||
|
||||
if rule_match == 'in':
|
||||
res &= value in rule_value
|
||||
res &= value in rule_value or '*' in rule_value
|
||||
elif rule_match == 'exact':
|
||||
res &= value == rule_value
|
||||
res &= value == rule_value or rule_value == '*'
|
||||
elif rule_match == 'contains':
|
||||
res &= rule_value in value
|
||||
res &= (rule_value in value)
|
||||
elif rule_match == 'startswith':
|
||||
res &= str(value).startswith(str(rule_value))
|
||||
elif rule_match == 'endswith':
|
||||
|
@ -499,7 +499,7 @@ class JSONManyToManyDescriptor:
|
|||
elif rule['match'] == 'ip_in':
|
||||
if isinstance(rule_value, str):
|
||||
rule_value = [rule_value]
|
||||
res &= contains_ip(value, rule_value)
|
||||
res &= '*' in rule_value or contains_ip(value, rule_value)
|
||||
elif rule['match'] == 'm2m':
|
||||
if isinstance(value, Manager):
|
||||
value = value.values_list('id', flat=True)
|
||||
|
|
|
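A standalone sketch of the wildcard-aware matching introduced above; the rule dict format follows the code, but this helper is illustrative, not the project's implementation.

def rule_matches(value, rule):
    match, rule_value = rule.get('match', 'exact'), rule.get('value', '')
    if match == 'in':
        return value in rule_value or '*' in rule_value
    if match == 'exact':
        return value == rule_value or rule_value == '*'
    if match == 'contains':
        return rule_value in value
    return False

print(rule_matches('linux', {'match': 'in', 'value': ['*']}))         # True via the wildcard
print(rule_matches('linux', {'match': 'exact', 'value': 'windows'}))  # False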
@ -6,6 +6,7 @@ import inspect
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from functools import wraps

from django.db import transaction


@ -217,3 +218,24 @@ def do_test():
    end = time.time()
    using = end - s
    print("end : %s, using: %s" % (end, using))


def cached_method(ttl=20):
    _cache = {}

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            key = (func, args, tuple(sorted(kwargs.items())))
            # Check whether a cached value exists and has not expired
            if key in _cache and time.time() - _cache[key]['timestamp'] < ttl:
                return _cache[key]['result']

            # Cache missing or expired: call the function and cache the result
            result = func(*args, **kwargs)
            _cache[key] = {'result': result, 'timestamp': time.time()}
            return result

        return wrapper

    return decorator
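A usage sketch for the decorator above (assuming it is importable as common.decorators.cached_method). Note that the cache key is (func, args, kwargs), so positional arguments, including a bound self or cls, must be hashable.

import time

@cached_method(ttl=2)
def slow_square(x):
    time.sleep(0.5)      # stand-in for an expensive query
    return x * x

start = time.time()
slow_square(4)           # computed, sleeps once
slow_square(4)           # served from the in-process cache
print(time.time() - start < 1.0)  # True: the second call skipped the sleep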
@ -14,6 +14,8 @@ from rest_framework.serializers import ValidationError
|
|||
|
||||
from common import const
|
||||
|
||||
logger = logging.getLogger('jumpserver.common')
|
||||
|
||||
__all__ = [
|
||||
"DatetimeRangeFilter", "IDSpmFilter",
|
||||
'IDInFilter', "CustomFilter",
|
||||
|
@ -70,7 +72,7 @@ class DatetimeRangeFilter(filters.BaseFilterBackend):
|
|||
]
|
||||
```
|
||||
""".format(view.name)
|
||||
logging.error(msg)
|
||||
logger.error(msg)
|
||||
raise ImproperlyConfigured(msg)
|
||||
|
||||
def filter_queryset(self, request, queryset, view):
|
||||
|
@ -213,6 +215,6 @@ class AttrRulesFilterBackend(filters.BaseFilterBackend):
|
|||
except Exception:
|
||||
raise ValidationError({'attr_rules': 'attr_rules should be json'})
|
||||
|
||||
logging.debug('attr_rules: %s', attr_rules)
|
||||
logger.debug('attr_rules: %s', attr_rules)
|
||||
q = RelatedManager.get_to_filter_q(attr_rules, queryset.model)
|
||||
return queryset.filter(q).distinct()
|
||||
|
|
|
@ -52,14 +52,16 @@ class BaseFileParser(BaseParser):
|
|||
fields_map = {}
|
||||
fields = self.serializer_fields
|
||||
for k, v in fields.items():
|
||||
if v.read_only:
|
||||
# 资产平台的 id 是只读的, 导入更新资产平台会失败
|
||||
if v.read_only and k not in ['id', 'pk']:
|
||||
continue
|
||||
fields_map.update({
|
||||
v.label: k,
|
||||
k: k
|
||||
})
|
||||
lowercase_fields_map = {k.lower(): v for k, v in fields_map.items()}
|
||||
field_names = [
|
||||
fields_map.get(column_title.strip('*'), '')
|
||||
lowercase_fields_map.get(column_title.strip('*').lower(), '')
|
||||
for column_title in column_titles
|
||||
]
|
||||
return field_names
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
import multiprocessing
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import TextChoices
|
||||
from .utils import ServicesUtil
|
||||
|
||||
from .hands import *
|
||||
from .utils import ServicesUtil
|
||||
|
||||
|
||||
class Services(TextChoices):
|
||||
|
@ -92,15 +94,11 @@ class BaseActionCommand(BaseCommand):
|
|||
super().__init__(*args, **kwargs)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
cores = 10
|
||||
if (multiprocessing.cpu_count() * 2 + 1) < cores:
|
||||
cores = multiprocessing.cpu_count() * 2 + 1
|
||||
|
||||
parser.add_argument(
|
||||
'services', nargs='+', choices=Services.export_services_values(), help='Service',
|
||||
'services', nargs='+', choices=Services.export_services_values(), help='Service',
|
||||
)
|
||||
parser.add_argument('-d', '--daemon', nargs="?", const=True)
|
||||
parser.add_argument('-w', '--worker', type=int, nargs="?", default=cores)
|
||||
parser.add_argument('-w', '--worker', type=int, nargs="?", default=4)
|
||||
parser.add_argument('-f', '--force', nargs="?", const=True)
|
||||
|
||||
def initial_util(self, *args, **options):
|
||||
|
|
|
@@ -1,12 +1,14 @@
import logging
import os
import sys
import logging

from django.conf import settings

from apps.jumpserver.const import CONFIG

try:
from apps.jumpserver import const

__version__ = const.VERSION
except ImportError as e:
print("Not found __version__: {}".format(e))

@@ -15,12 +17,11 @@ except ImportError as e:
__version__ = 'Unknown'
sys.exit(1)


HTTP_HOST = CONFIG.HTTP_BIND_HOST or '127.0.0.1'
HTTP_PORT = CONFIG.HTTP_LISTEN_PORT or 8080
WS_PORT = CONFIG.WS_LISTEN_PORT or 8082
DEBUG = CONFIG.DEBUG or False
BASE_DIR = os.path.dirname(settings.BASE_DIR)
LOG_DIR = os.path.join(BASE_DIR, 'logs')
LOG_DIR = os.path.join(BASE_DIR, 'data', 'logs')
APPS_DIR = os.path.join(BASE_DIR, 'apps')
TMP_DIR = os.path.join(BASE_DIR, 'tmp')

@@ -1,5 +1,5 @@
from ..hands import *
from .base import BaseService
from ..hands import *

__all__ = ['GunicornService']


@@ -22,7 +22,8 @@ class GunicornService(BaseService):
'-b', bind,
'-k', 'uvicorn.workers.UvicornWorker',
'-w', str(self.worker),
'--max-requests', '4096',
'--max-requests', '10240',
'--max-requests-jitter', '2048',
'--access-logformat', log_format,
'--access-logfile', '-'
]

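The Gunicorn hunk raises `--max-requests` to 10240 and adds `--max-requests-jitter`, which restarts each worker after a randomized number of requests so long-lived workers do not all recycle (or accumulate memory) at the same moment. A hedged sketch of launching an ASGI app with the same flags, using a placeholder module path and bind address:

```python
import subprocess

def start_gunicorn(worker_count: int = 4, bind: str = '127.0.0.1:8080') -> subprocess.Popen:
    # 'myproject.asgi:application' is a placeholder; substitute the real ASGI module.
    cmd = [
        'gunicorn', 'myproject.asgi:application',
        '-b', bind,
        '-k', 'uvicorn.workers.UvicornWorker',
        '-w', str(worker_count),
        '--max-requests', '10240',          # recycle a worker after ~10k requests
        '--max-requests-jitter', '2048',    # stagger recycling across workers
        '--access-logfile', '-',
    ]
    return subprocess.Popen(cmd)
```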
@@ -44,19 +44,24 @@ def set_default_by_type(tp, data, field_info):

def create_serializer_class(serializer_name, fields_info):
serializer_fields = {}
fields_name = ['name', 'label', 'default', 'type', 'help_text']
fields_name = ['name', 'label', 'default', 'required', 'type', 'help_text']

for i, field_info in enumerate(fields_info):
data = {k: field_info.get(k) for k in fields_name}
field_type = data.pop('type', 'str')

if data.get('default') is None:
# A user-defined default and required may conflict, so handle that here
default = data.get('default', None)
if default is None:
data.pop('default', None)
data['required'] = field_info.get('required', True)
data['required'] = True
elif default == '':
data['required'] = False
data['allow_blank'] = True
else:
data['required'] = False
data = set_default_by_type(field_type, data, field_info)
data = set_default_if_need(data, i)
if data.get('default', None) is not None:
data['required'] = False
field_name = data.pop('name')
field_class = type_field_map.get(field_type, serializers.CharField)
serializer_fields[field_name] = field_class(**data)

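In `create_serializer_class`, a user-supplied `default` and `required` can contradict each other (DRF refuses a field that is both required and has a default), so the hunk normalizes the two before instantiating the field. A simplified sketch of that normalization, assuming plain `rest_framework` CharFields and a hypothetical `field_info` dict:

```python
from rest_framework import serializers

def build_char_field(field_info: dict) -> serializers.CharField:
    kwargs = {'label': field_info.get('label'), 'help_text': field_info.get('help_text')}
    default = field_info.get('default')

    if default is None:
        # No default: fall back to whatever 'required' the user asked for.
        kwargs['required'] = field_info.get('required', True)
    elif default == '':
        # Empty-string default: optional, and blank values are acceptable.
        kwargs.update(required=False, allow_blank=True, default='')
    else:
        # A concrete default makes the field optional.
        kwargs.update(required=False, default=default)
    return serializers.CharField(**kwargs)
```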
@@ -212,6 +212,23 @@ class BitChoicesField(TreeChoicesField):


class PhoneField(serializers.CharField):

def to_internal_value(self, data):
if isinstance(data, dict):
code = data.get('code')
phone = data.get('phone', '')
if code and phone:
data = '{}{}'.format(code, phone)
else:
data = phone
try:
phone = phonenumbers.parse(data, 'CN')
data = '{}{}'.format(phone.country_code, phone.national_number)
except phonenumbers.NumberParseException:
data = '+86{}'.format(data)

return super().to_internal_value(data)

def to_representation(self, value):
if value:
try:

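The new `PhoneField` leans on the `phonenumbers` package to turn whatever the client sends (a bare national number or a `{code, phone}` dict) into a single string carrying the country code. A standalone sketch of that normalization, assuming `phonenumbers` is installed and defaulting to the CN region as the diff does (the leading '+' here is an illustrative choice, not the field's exact output):

```python
import phonenumbers

def normalize_phone(raw: str, region: str = 'CN') -> str:
    """Return the number as '+<country code><national number>', falling back to +86."""
    try:
        parsed = phonenumbers.parse(raw, region)
        return '+{}{}'.format(parsed.country_code, parsed.national_number)
    except phonenumbers.NumberParseException:
        return '+86{}'.format(raw)

print(normalize_phone('13812345678'))   # +8613812345678
print(normalize_phone('+14155552671'))  # +14155552671
```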
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
#
import logging
import os
import re
from collections import defaultdict

@@ -14,9 +13,10 @@ from django.dispatch import receiver
from jumpserver.utils import get_current_request
from .local import thread_local
from .signals import django_ready
from .utils import get_logger

pattern = re.compile(r'FROM `(\w+)`')
logger = logging.getLogger("jumpserver.common")
logger = get_logger(__name__)


class Counter:

@@ -129,7 +129,6 @@ else:

@receiver(django_ready)
def check_migrations_file_prefix_conflict(*args, **kwargs):

if not settings.DEBUG_DEV:
return


@@ -172,7 +171,7 @@ def check_migrations_file_prefix_conflict(*args, **kwargs):
if not conflict_count:
return

print('='*80)
print('=' * 80)
for conflict_file in conflict_files:
msg_dir = '{:<15}'.format(conflict_file[0])
msg_split = '=> '

@@ -181,4 +180,4 @@ def check_migrations_file_prefix_conflict(*args, **kwargs):
msg_right2 = ' ' * len(msg_left) + msg_split + conflict_file[2]
print(f'{msg_left}{msg_right1}\n{msg_right2}\n')

print('='*80)
print('=' * 80)

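`check_migrations_file_prefix_conflict` only runs when `DEBUG_DEV` is on and prints any apps whose migration files share a numeric prefix (for example two different `0042_*.py` files), the usual symptom of parallel feature branches. A rough standalone sketch of the same scan over a hypothetical `apps/` layout:

```python
import os
from collections import defaultdict

def find_prefix_conflicts(apps_dir: str) -> dict:
    """Return {app_name: [prefixes that appear on more than one migration file]}."""
    conflicts = {}
    for app in sorted(os.listdir(apps_dir)):
        migrations_dir = os.path.join(apps_dir, app, 'migrations')
        if not os.path.isdir(migrations_dir):
            continue
        by_prefix = defaultdict(list)
        for name in os.listdir(migrations_dir):
            if name.endswith('.py') and '_' in name:
                by_prefix[name.split('_', 1)[0]].append(name)
        dupes = [p for p, files in by_prefix.items() if len(files) > 1]
        if dupes:
            conflicts[app] = dupes
    return conflicts

# print(find_prefix_conflicts('apps'))  # e.g. {'assets': ['0042']}
```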
@@ -49,6 +49,7 @@ def send_mail_attachment_async(subject, message, recipient_list, attachment_list
if attachment_list is None:
attachment_list = []
from_email = settings.EMAIL_FROM or settings.EMAIL_HOST_USER
subject = (settings.EMAIL_SUBJECT_PREFIX or '') + subject
email = EmailMultiAlternatives(
subject=subject,
body=message,

@@ -1,15 +1,15 @@
from django.core.cache import cache
from django.conf import settings
from django.core.mail import send_mail
from celery import shared_task

from common.sdk.sms.exceptions import CodeError, CodeExpired, CodeSendTooFrequently
from common.sdk.sms.endpoint import SMS
from common.exceptions import JMSException
from common.utils.random import random_string
from common.utils import get_logger
from django.conf import settings
from django.core.cache import cache
from django.utils.translation import gettext_lazy as _

from common.exceptions import JMSException
from common.sdk.sms.endpoint import SMS
from common.sdk.sms.exceptions import CodeError, CodeExpired, CodeSendTooFrequently
from common.tasks import send_mail_async
from common.utils import get_logger
from common.utils.random import random_string

logger = get_logger(__file__)


@@ -78,8 +78,7 @@ class SendAndVerifyCodeUtil(object):
def __send_with_email(self):
subject = self.other_args.get('subject')
message = self.other_args.get('message')
from_email = settings.EMAIL_FROM or settings.EMAIL_HOST_USER
send_mail(subject, message, from_email, [self.target], html_message=message)
send_mail_async(subject, message, [self.target], html_message=message)

def __send(self, code):
"""

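The verification-code hunk replaces the blocking `django.core.mail.send_mail` call with the project's `send_mail_async` Celery task, so a slow or unreachable SMTP server no longer stalls the request. A minimal sketch of that pattern, assuming a configured Celery app and Django email settings (the task name here is illustrative, not the project's actual helper):

```python
from celery import shared_task
from django.conf import settings
from django.core.mail import send_mail

@shared_task
def send_mail_async_example(subject, message, recipient_list, html_message=None):
    # Runs in a Celery worker, so SMTP latency never blocks the web request.
    from_email = settings.EMAIL_FROM or settings.EMAIL_HOST_USER
    send_mail(subject, message, from_email, recipient_list, html_message=html_message)

# In the request handler: queue it instead of sending inline.
# send_mail_async_example.delay('Verify code', 'Your code is 123456', ['user@example.org'])
```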
@@ -17,9 +17,9 @@ from audits.models import UserLoginLog, PasswordChangeLog, OperateLog, FTPLog, J
from common.utils import lazyproperty
from common.utils.timezone import local_now, local_zero_hour
from ops.const import JobStatus
from ops.models import JobExecution
from orgs.caches import OrgResourceStatisticsCache
from orgs.utils import current_org
from terminal.const import RiskLevelChoices
from terminal.models import Session, Command
from terminal.utils import ComponentsPrometheusMetricsUtil
from users.models import User

@@ -50,6 +50,10 @@ class DateTimeMixin:
t = local_now() - timezone.timedelta(days=days)
return t

@lazyproperty
def date_start_end(self):
return self.days_to_datetime.date(), local_now().date()

@lazyproperty
def dates_list(self):
now = local_now()

@@ -126,12 +130,6 @@ class DateTimeMixin:
queryset = JobLog.objects.filter(date_created__gte=t)
return queryset

@lazyproperty
def jobs_executed_queryset(self):
t = self.days_to_datetime
queryset = JobExecution.objects.filter(date_created__gte=t)
return queryset


class DatesLoginMetricMixin:
dates_list: list

@@ -143,101 +141,40 @@ class DatesLoginMetricMixin:
operate_logs_queryset: OperateLog.objects
password_change_logs_queryset: PasswordChangeLog.objects

@staticmethod
def get_cache_key(date, tp):
date_str = date.strftime("%Y%m%d")
key = "SESSION_DATE_{}_{}_{}".format(current_org.id, tp, date_str)
return key

def __get_data_from_cache(self, date, tp):
if date == timezone.now().date():
return None
cache_key = self.get_cache_key(date, tp)
count = cache.get(cache_key)
return count

def __set_data_to_cache(self, date, tp, count):
cache_key = self.get_cache_key(date, tp)
cache.set(cache_key, count, 3600)

@staticmethod
def get_date_start_2_end(d):
time_min = timezone.datetime.min.time()
time_max = timezone.datetime.max.time()
tz = timezone.get_current_timezone()
ds = timezone.datetime.combine(d, time_min).replace(tzinfo=tz)
de = timezone.datetime.combine(d, time_max).replace(tzinfo=tz)
return ds, de

def get_date_login_count(self, date):
tp = "LOGIN-USER"
count = self.__get_data_from_cache(date, tp)
if count is not None:
return count
ds, de = self.get_date_start_2_end(date)
count = UserLoginLog.objects.filter(datetime__range=(ds, de)).count()
self.__set_data_to_cache(date, tp, count)
return count

def get_dates_metrics_total_count_login(self):
data = []
for d in self.dates_list:
count = self.get_date_login_count(d)
data.append(count)
if len(data) == 0:
data = [0]
return data

def get_date_user_count(self, date):
tp = "USER"
count = self.__get_data_from_cache(date, tp)
if count is not None:
return count
ds, de = self.get_date_start_2_end(date)
count = len(set(Session.objects.filter(date_start__range=(ds, de)).values_list('user_id', flat=True)))
self.__set_data_to_cache(date, tp, count)
return count
queryset = UserLoginLog.objects \
.filter(datetime__range=(self.date_start_end)) \
.values('datetime__date').annotate(id__count=Count('id')) \
.order_by('datetime__date')
map_date_logincount = {i['datetime__date']: i['id__count'] for i in queryset}
return [map_date_logincount.get(d, 0) for d in self.dates_list]

def get_dates_metrics_total_count_active_users(self):
data = []
for d in self.dates_list:
count = self.get_date_user_count(d)
data.append(count)
return data

def get_date_asset_count(self, date):
tp = "ASSET"
count = self.__get_data_from_cache(date, tp)
if count is not None:
return count
ds, de = self.get_date_start_2_end(date)
count = len(set(Session.objects.filter(date_start__range=(ds, de)).values_list('asset', flat=True)))
self.__set_data_to_cache(date, tp, count)
return count
queryset = Session.objects \
.filter(date_start__range=(self.date_start_end)) \
.values('date_start__date') \
.annotate(id__count=Count('user_id', distinct=True)) \
.order_by('date_start__date')
map_date_usercount = {i['date_start__date']: i['id__count'] for i in queryset}
return [map_date_usercount.get(d, 0) for d in self.dates_list]

def get_dates_metrics_total_count_active_assets(self):
data = []
for d in self.dates_list:
count = self.get_date_asset_count(d)
data.append(count)
return data

def get_date_session_count(self, date):
tp = "SESSION"
count = self.__get_data_from_cache(date, tp)
if count is not None:
return count
ds, de = self.get_date_start_2_end(date)
count = Session.objects.filter(date_start__range=(ds, de)).count()
self.__set_data_to_cache(date, tp, count)
return count
queryset = Session.objects \
.filter(date_start__range=(self.date_start_end)) \
.values('date_start__date') \
.annotate(id__count=Count('asset_id', distinct=True)) \
.order_by('date_start__date')
map_date_assetcount = {i['date_start__date']: i['id__count'] for i in queryset}
return [map_date_assetcount.get(d, 0) for d in self.dates_list]

def get_dates_metrics_total_count_sessions(self):
data = []
for d in self.dates_list:
count = self.get_date_session_count(d)
data.append(count)
return data
queryset = Session.objects \
.filter(date_start__range=(self.date_start_end)) \
.values('date_start__date') \
.annotate(id__count=Count('id')) \
.order_by('date_start__date')
map_date_usercount = {i['date_start__date']: i['id__count'] for i in queryset}
return [map_date_usercount.get(d, 0) for d in self.dates_list]

@lazyproperty
def get_type_to_assets(self):

@@ -312,7 +249,7 @@ class DatesLoginMetricMixin:

@lazyproperty
def commands_danger_amount(self):
return self.command_queryset.filter(risk_level=Command.RiskLevelChoices.dangerous).count()
return self.command_queryset.filter(risk_level=RiskLevelChoices.reject).count()

@lazyproperty
def job_logs_running_amount(self):

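The dashboard rewrite drops the per-day loop (one cached query per date per metric) and asks the database for all per-day counts in a single `values(...).annotate(Count(...))` query, then fills missing days with 0. A generic sketch of that pattern against a hypothetical model with a `date_start`-style field:

```python
from django.db.models import Count

def daily_counts(queryset, date_field, start, end, dates_list):
    """One grouped query instead of len(dates_list) separate count() calls."""
    rows = (
        queryset
        .filter(**{f'{date_field}__range': (start, end)})
        .values(f'{date_field}__date')
        .annotate(total=Count('id'))
        .order_by(f'{date_field}__date')
    )
    by_date = {row[f'{date_field}__date']: row['total'] for row in rows}
    return [by_date.get(d, 0) for d in dates_list]

# Usage (hypothetical): daily_counts(Session.objects.all(), 'date_start', start, end, dates)
```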
@@ -186,8 +186,9 @@ class Config(dict):
'BOOTSTRAP_TOKEN': '',
'DEBUG': False,
'DEBUG_DEV': False,
'DEBUG_ANSIBLE': False,
'LOG_LEVEL': 'DEBUG',
'LOG_DIR': os.path.join(PROJECT_DIR, 'logs'),
'LOG_DIR': os.path.join(PROJECT_DIR, 'data', 'logs'),
'DB_ENGINE': 'mysql',
'DB_NAME': 'jumpserver',
'DB_HOST': '127.0.0.1',

@@ -231,8 +232,12 @@ class Config(dict):
'SESSION_COOKIE_AGE': 3600 * 24,
'SESSION_EXPIRE_AT_BROWSER_CLOSE': False,
'LOGIN_URL': reverse_lazy('authentication:login'),
'CONNECTION_TOKEN_EXPIRATION': 5 * 60,  # default
'CONNECTION_TOKEN_EXPIRATION_MAX': 60 * 60 * 24 * 30,  # maximum

'CONNECTION_TOKEN_ONETIME_EXPIRATION': 5 * 60,  # default (new)
'CONNECTION_TOKEN_EXPIRATION': 5 * 60,  # default (old)

'CONNECTION_TOKEN_REUSABLE_EXPIRATION': 60 * 60 * 24 * 30,  # maximum (new)
'CONNECTION_TOKEN_EXPIRATION_MAX': 60 * 60 * 24 * 30,  # maximum (old)
'CONNECTION_TOKEN_REUSABLE': False,

# Custom Config

@@ -558,6 +563,11 @@ class Config(dict):
'FTP_FILE_MAX_STORE': 100,
}

old_config_map = {
'CONNECTION_TOKEN_ONETIME_EXPIRATION': 'CONNECTION_TOKEN_EXPIRATION',
'CONNECTION_TOKEN_REUSABLE_EXPIRATION': 'CONNECTION_TOKEN_EXPIRATION_MAX',
}

def __init__(self, *args):
super().__init__(*args)
self.secret_encryptor = ConfigCrypto.get_secret_encryptor()

@@ -698,13 +708,19 @@ class Config(dict):
value = self.convert_type(item, value)
return value

def get(self, item):
def get(self, item, default=None):
# Then look it up in the config file
value = self.get_from_config(item)
if value is None:
value = self.get_from_env(item)

# This can recurse, so prefer the default carried over from the previous call
if default is None:
default = self.defaults.get(item)
if value is None and item in self.old_config_map:
return self.get(self.old_config_map[item], default)
if value is None:
value = self.defaults.get(item)
value = default
if self.secret_encryptor:
value = self.secret_encryptor.decrypt_if_need(value, item)
return value

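The renamed CONNECTION_TOKEN_* settings keep working because `Config.get` now consults `old_config_map`: if the new key is absent it re-resolves the legacy key, carrying the new key's default along so the recursion does not lose it. A stripped-down sketch of that lookup order, with a toy config dict standing in for the env and file sources:

```python
class MiniConfig(dict):
    defaults = {'CONNECTION_TOKEN_ONETIME_EXPIRATION': 5 * 60}
    old_config_map = {'CONNECTION_TOKEN_ONETIME_EXPIRATION': 'CONNECTION_TOKEN_EXPIRATION'}

    def get(self, item, default=None):
        value = dict.get(self, item)  # stand-in for the config-file / env lookup
        if default is None:
            default = self.defaults.get(item)
        if value is None and item in self.old_config_map:
            # Fall back to the legacy key, but keep the new key's default.
            return self.get(self.old_config_map[item], default)
        return value if value is not None else default

cfg = MiniConfig({'CONNECTION_TOKEN_EXPIRATION': 600})          # only the old key is set
print(cfg.get('CONNECTION_TOKEN_ONETIME_EXPIRATION'))           # 600
print(MiniConfig().get('CONNECTION_TOKEN_ONETIME_EXPIRATION'))  # 300 (the default)
```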
@@ -101,6 +101,19 @@ class RefererCheckMiddleware:
return response


class SQLCountMiddleware:
def __init__(self, get_response):
self.get_response = get_response
if not settings.DEBUG_DEV:
raise MiddlewareNotUsed

def __call__(self, request):
from django.db import connection
response = self.get_response(request)
response['X-JMS-SQL-COUNT'] = len(connection.queries) - 2
return response


class StartMiddleware:
def __init__(self, get_response):
self.get_response = get_response

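`SQLCountMiddleware` raises `MiddlewareNotUsed` in `__init__` when `DEBUG_DEV` is off, which tells Django to drop it from the middleware chain entirely instead of checking a flag on every request; when active, it reports how many SQL queries the request executed via the `X-JMS-SQL-COUNT` response header (`connection.queries` is only populated while `DEBUG` is on). One possible way to read it from a Django test client, with an illustrative URL rather than a real project path:

```python
from django.test import Client

def print_sql_count(path: str = '/api/health/') -> None:
    # The path is illustrative; any URL served by the project works.
    response = Client().get(path)
    print(path, 'executed', response.get('X-JMS-SQL-COUNT'), 'SQL queries')
```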
@@ -175,13 +175,9 @@ AUTH_OAUTH2_LOGOUT_URL_NAME = "authentication:oauth2:logout"
AUTH_TEMP_TOKEN = CONFIG.AUTH_TEMP_TOKEN

# Other setting
# This is the User Login Private Token
TOKEN_EXPIRATION = CONFIG.TOKEN_EXPIRATION
OTP_IN_RADIUS = CONFIG.OTP_IN_RADIUS
# Connection token
CONNECTION_TOKEN_EXPIRATION = CONFIG.CONNECTION_TOKEN_EXPIRATION
if CONNECTION_TOKEN_EXPIRATION < 5 * 60:
# At least 5 minutes
CONNECTION_TOKEN_EXPIRATION = 5 * 60

RBAC_BACKEND = 'rbac.backends.RBACBackend'
AUTH_BACKEND_MODEL = 'authentication.backends.base.JMSModelBackend'

@@ -53,6 +53,8 @@ BOOTSTRAP_TOKEN = CONFIG.BOOTSTRAP_TOKEN
DEBUG = CONFIG.DEBUG
# SECURITY WARNING: If you run with debug turned on, more debug messages will be logged
DEBUG_DEV = CONFIG.DEBUG_DEV
# SECURITY WARNING: If you run ansible tasks with debug turned on, more debug messages will be logged
DEBUG_ANSIBLE = CONFIG.DEBUG_ANSIBLE

# Absolute URL used in some cases, for example email links
SITE_URL = CONFIG.SITE_URL

@@ -128,6 +130,7 @@ MIDDLEWARE = [
'jumpserver.middleware.DemoMiddleware',
'jumpserver.middleware.RequestMiddleware',
'jumpserver.middleware.RefererCheckMiddleware',
'jumpserver.middleware.SQLCountMiddleware',
'orgs.middleware.OrgMiddleware',
'authentication.backends.oidc.middleware.OIDCRefreshIDTokenMiddleware',
'authentication.backends.cas.middleware.CASMiddleware',

@@ -133,8 +133,13 @@ TICKETS_ENABLED = CONFIG.TICKETS_ENABLED
REFERER_CHECK_ENABLED = CONFIG.REFERER_CHECK_ENABLED

CONNECTION_TOKEN_ENABLED = CONFIG.CONNECTION_TOKEN_ENABLED
# Connection token
CONNECTION_TOKEN_ONETIME_EXPIRATION = CONFIG.CONNECTION_TOKEN_ONETIME_EXPIRATION
if CONNECTION_TOKEN_ONETIME_EXPIRATION < 5 * 60:
# At least 5 minutes
CONNECTION_TOKEN_ONETIME_EXPIRATION = 5 * 60
CONNECTION_TOKEN_REUSABLE = CONFIG.CONNECTION_TOKEN_REUSABLE
CONNECTION_TOKEN_EXPIRATION_MAX = CONFIG.CONNECTION_TOKEN_EXPIRATION_MAX
CONNECTION_TOKEN_REUSABLE_EXPIRATION = CONFIG.CONNECTION_TOKEN_REUSABLE_EXPIRATION

FORGOT_PASSWORD_URL = CONFIG.FORGOT_PASSWORD_URL

@@ -1,9 +1,10 @@
# -*- coding: utf-8 -*-
#
import os

from ..const import PROJECT_DIR, CONFIG

LOG_DIR = os.path.join(PROJECT_DIR, 'logs')
LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'logs')
JUMPSERVER_LOG_FILE = os.path.join(LOG_DIR, 'jumpserver.log')
DRF_EXCEPTION_LOG_FILE = os.path.join(LOG_DIR, 'drf_exception.log')
UNEXPECTED_EXCEPTION_LOG_FILE = os.path.join(LOG_DIR, 'unexpected_exception.log')

@@ -132,7 +133,6 @@ LOGGING = {
'handlers': ['null'],
'level': 'ERROR'
}

}
}

Some files were not shown because too many files have changed in this diff.