mirror of https://github.com/jumpserver/jumpserver
commit
e90e61e8dd
@@ -1,5 +1,4 @@
 .git
-logs/*
 data/*
 .github
 tmp/*

@ -6,8 +6,7 @@ labels: 类型:需求
|
||||||
assignees:
|
assignees:
|
||||||
- ibuler
|
- ibuler
|
||||||
- baijiangjie
|
- baijiangjie
|
||||||
|
- wojiushixiaobai
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
**请描述您的需求或者改进建议.**
|
**请描述您的需求或者改进建议.**
|
||||||
|
|
|
@@ -21,17 +21,44 @@ jobs:
 actions: 'remove-labels'
 labels: '状态:待反馈'

-add-label-if-not-author:
+add-label-if-is-member:
 runs-on: ubuntu-latest
-if: (github.event.issue.user.id != github.event.comment.user.id) && !github.event.issue.pull_request && (github.event.issue.state == 'open')
 steps:
+- name: Checkout repository
+uses: actions/checkout@v2

+- name: Get Organization name
+id: org_name
+run: echo "data=$(echo '${{ github.repository }}' | cut -d '/' -f 1)" >> $GITHUB_OUTPUT

+- name: Get Organization public members
+uses: octokit/request-action@v2.x
+id: members
+with:
+route: GET /orgs/${{ steps.org_name.outputs.data }}/public_members
+env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

+- name: Process public members data
+# 将 members 中的数据转化为 login 字段的拼接字符串
+id: member_names
+run: echo "data=$(echo '${{ steps.members.outputs.data }}' | jq '[.[].login] | join(",")')" >> $GITHUB_OUTPUT

+- run: "echo members: '${{ steps.members.outputs.data }}'"
+- run: "echo member names: '${{ steps.member_names.outputs.data }}'"
+- run: "echo comment user: '${{ github.event.comment.user.login }}'"
+- run: "echo contains? : '${{ contains(steps.member_names.outputs.data, github.event.comment.user.login) }}'"

 - name: Add require replay label
+if: contains(steps.member_names.outputs.data, github.event.comment.user.login)
 uses: actions-cool/issues-helper@v2
 with:
 actions: 'add-labels'
 labels: '状态:待反馈'

 - name: Remove require handle label
+if: contains(steps.member_names.outputs.data, github.event.comment.user.login)
 uses: actions-cool/issues-helper@v2
 with:
 actions: 'remove-labels'

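The new `add-label-if-is-member` job decides whether the commenter is an organization member by running `contains()` over the comma-joined list of public member logins produced by the jq step. A minimal Python sketch of that check; the joined string and login names below are hypothetical:

```python
def is_public_member(member_names: str, commenter: str) -> bool:
    """Rough equivalent of the workflow expression
    contains(steps.member_names.outputs.data, github.event.comment.user.login).

    GitHub's contains() is a plain substring test on the joined string;
    splitting on commas, as done here, is the stricter exact-match variant.
    """
    names = [n.strip('" ') for n in member_names.split(',')]
    return commenter in names


# Hypothetical values for illustration only.
print(is_public_member('"ibuler,baijiangjie,wojiushixiaobai"', 'ibuler'))  # True
```
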
@@ -35,7 +35,6 @@ celerybeat-schedule.db
 docs/_build/
 xpack
 xpack.bak
-logs/*
 ### Vagrant ###
 .vagrant/
 release/*

Dockerfile (32 changes)

@@ -1,4 +1,4 @@
-FROM python:3.9-slim-buster as stage-build
+FROM jumpserver/python:3.9-slim-buster as stage-build
 ARG TARGETARCH

 ARG VERSION

@@ -8,7 +8,7 @@ WORKDIR /opt/jumpserver
 ADD . .
 RUN cd utils && bash -ixeu build.sh

-FROM python:3.9-slim-buster
+FROM jumpserver/python:3.9-slim-buster
 ARG TARGETARCH
 MAINTAINER JumpServer Team <ibuler@qq.com>

@@ -24,6 +24,7 @@ ARG DEPENDENCIES=" \
 libjpeg-dev \
 libldap2-dev \
 libsasl2-dev \
+libssl-dev \
 libxml2-dev \
 libxmlsec1-dev \
 libxmlsec1-openssl \

@@ -66,27 +67,36 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \

 ARG DOWNLOAD_URL=https://download.jumpserver.org

-RUN mkdir -p /opt/oracle/ \
-&& cd /opt/oracle/ \
-&& wget ${DOWNLOAD_URL}/public/instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip \
-&& unzip instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip \
-&& sh -c "echo /opt/oracle/instantclient_19_10 > /etc/ld.so.conf.d/oracle-instantclient.conf" \
-&& ldconfig \
-&& rm -f instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip
+RUN set -ex \
+&& \
+if [ "${TARGETARCH}" == "amd64" ] || [ "${TARGETARCH}" == "arm64" ]; then \
+mkdir -p /opt/oracle; \
+cd /opt/oracle; \
+wget ${DOWNLOAD_URL}/public/instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip; \
+unzip instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip; \
+echo "/opt/oracle/instantclient_19_10" > /etc/ld.so.conf.d/oracle-instantclient.conf; \
+ldconfig; \
+rm -f instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip; \
+fi

 WORKDIR /tmp/build
 COPY ./requirements ./requirements

 ARG PIP_MIRROR=https://pypi.douban.com/simple
-ENV PIP_MIRROR=$PIP_MIRROR
 ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
-ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

 RUN --mount=type=cache,target=/root/.cache/pip \
 set -ex \
 && pip config set global.index-url ${PIP_MIRROR} \
 && pip install --upgrade pip \
 && pip install --upgrade setuptools wheel \
+&& \
+if [ "${TARGETARCH}" == "loong64" ]; then \
+pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-38.0.4-cp39-cp39-linux_loongarch64.whl; \
+pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp39-cp39-linux_loongarch64.whl; \
+pip install https://download.jumpserver.org/pypi/simple/PyNaCl/PyNaCl-1.5.0-cp39-cp39-linux_loongarch64.whl; \
+pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.2-cp39-cp39-linux_loongarch64.whl; \
+fi \
 && pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
 && pip install -r requirements/requirements.txt

@@ -1,97 +0,0 @@
-FROM python:3.9-slim-buster as stage-build
-ARG TARGETARCH

-ARG VERSION
-ENV VERSION=$VERSION

-WORKDIR /opt/jumpserver
-ADD . .
-RUN cd utils && bash -ixeu build.sh

-FROM python:3.9-slim-buster
-ARG TARGETARCH
-MAINTAINER JumpServer Team <ibuler@qq.com>

-ARG BUILD_DEPENDENCIES=" \
-g++ \
-make \
-pkg-config"

-ARG DEPENDENCIES=" \
-freetds-dev \
-libpq-dev \
-libffi-dev \
-libjpeg-dev \
-libldap2-dev \
-libsasl2-dev \
-libssl-dev \
-libxml2-dev \
-libxmlsec1-dev \
-libxmlsec1-openssl \
-freerdp2-dev \
-libaio-dev"

-ARG TOOLS=" \
-ca-certificates \
-curl \
-default-libmysqlclient-dev \
-default-mysql-client \
-locales \
-openssh-client \
-procps \
-sshpass \
-telnet \
-unzip \
-vim \
-git \
-wget"

-RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
-set -ex \
-&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
-&& apt-get update \
-&& apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
-&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
-&& apt-get -y install --no-install-recommends ${TOOLS} \
-&& mkdir -p /root/.ssh/ \
-&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null\n\tCiphers +aes128-cbc\n\tKexAlgorithms +diffie-hellman-group1-sha1\n\tHostKeyAlgorithms +ssh-rsa" > /root/.ssh/config \
-&& echo "set mouse-=a" > ~/.vimrc \
-&& echo "no" | dpkg-reconfigure dash \
-&& echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
-&& sed -i "s@# export @export @g" ~/.bashrc \
-&& sed -i "s@# alias @alias @g" ~/.bashrc \
-&& rm -rf /var/lib/apt/lists/*

-WORKDIR /tmp/build
-COPY ./requirements ./requirements

-ARG PIP_MIRROR=https://pypi.douban.com/simple
-ENV PIP_MIRROR=$PIP_MIRROR
-ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
-ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

-RUN --mount=type=cache,target=/root/.cache/pip \
-set -ex \
-&& pip config set global.index-url ${PIP_MIRROR} \
-&& pip install --upgrade pip \
-&& pip install --upgrade setuptools wheel \
-&& pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-38.0.4-cp39-cp39-linux_loongarch64.whl \
-&& pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp39-cp39-linux_loongarch64.whl \
-&& pip install https://download.jumpserver.org/pypi/simple/PyNaCl/PyNaCl-1.5.0-cp39-cp39-linux_loongarch64.whl \
-&& pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.2-cp39-cp39-linux_loongarch64.whl \
-&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
-&& pip install -r requirements/requirements.txt

-COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
-RUN echo > /opt/jumpserver/config.yml \
-&& rm -rf /tmp/build

-WORKDIR /opt/jumpserver
-VOLUME /opt/jumpserver/data
-VOLUME /opt/jumpserver/logs

-ENV LANG=zh_CN.UTF-8

-EXPOSE 8080

-ENTRYPOINT ["./entrypoint.sh"]

README.md (48 changes)

@@ -17,18 +17,16 @@
 9 年时间,倾情投入,用心做好一款开源堡垒机。
 </p>

-| :warning: 注意 :warning: |
-|:-------------------------------------------------------------------------------------------------------------------------:|
-| 3.0 架构上和 2.0 变化较大,建议全新安装一套环境来体验。如需升级,请务必升级前进行备份,并[查阅文档](https://kb.fit2cloud.com/?p=06638d69-f109-4333-b5bf-65b17b297ed9) |
---------------------------
-JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型的资产,包括:
+JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。
+
+JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型的资产,包括:

 - **SSH**: Linux / Unix / 网络设备 等;
 - **Windows**: Web 方式连接 / 原生 RDP 连接;
-- **数据库**: MySQL / Oracle / SQLServer / PostgreSQL 等;
-- **Kubernetes**: 支持连接到 K8s 集群中的 Pods;
+- **数据库**: MySQL / MariaDB / PostgreSQL / Oracle / SQLServer / ClickHouse 等;
+- **NoSQL**: Redis / MongoDB 等;
+- **GPT**: ChatGPT 等;
+- **云服务**: Kubernetes / VMware vSphere 等;
 - **Web 站点**: 各类系统的 Web 管理后台;
 - **应用**: 通过 Remote App 连接各类应用。

@@ -81,11 +79,7 @@ JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运

 如果您在使用过程中有任何疑问或对建议,欢迎提交 [GitHub Issue](https://github.com/jumpserver/jumpserver/issues/new/choose)。

-您也可以到我们的 [社区论坛](https://bbs.fit2cloud.com/c/js/5) 及微信交流群当中进行交流沟通。
+您也可以到我们的 [社区论坛](https://bbs.fit2cloud.com/c/js/5) 当中进行交流沟通。

-**微信交流群**

-<img src="https://download.jumpserver.org/images/wecom-group.jpeg" alt="微信群二维码" width="200"/>

 ### 参与贡献

@@ -95,15 +89,20 @@ JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运

 ## 组件项目

 | 项目 | 状态 | 描述 |
-|--------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------|
+|--------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|
 | [Lina](https://github.com/jumpserver/lina) | <a href="https://github.com/jumpserver/lina/releases"><img alt="Lina release" src="https://img.shields.io/github/release/jumpserver/lina.svg" /></a> | JumpServer Web UI 项目 |
 | [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal 项目 |
-| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer 字符协议 Connector 项目,替代原来 Python 版本的 [Coco](https://github.com/jumpserver/coco) |
+| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer 字符协议 Connector 项目 |
 | [Lion](https://github.com/jumpserver/lion-release) | <a href="https://github.com/jumpserver/lion-release/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion-release.svg" /></a> | JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/) |
-| [Magnus](https://github.com/jumpserver/magnus-release) | <a href="https://github.com/jumpserver/magnus-release/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/magnus-release.svg" /> | JumpServer 数据库代理 Connector 项目 |
-| [Clients](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer 客户端 项目 |
-| [Installer](https://github.com/jumpserver/installer) | <a href="https://github.com/jumpserver/installer/releases"><img alt="Installer release" src="https://img.shields.io/github/release/jumpserver/installer.svg" /> | JumpServer 安装包 项目 |
+| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer RDP 代理 Connector 项目 |
+| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer 远程应用 Connector 项目 |
+| [Magnus](https://github.com/jumpserver/magnus-release) | <a href="https://github.com/jumpserver/magnus-release/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/magnus-release.svg" /> | JumpServer 数据库代理 Connector 项目 |
+| [Chen](https://github.com/jumpserver/chen-release) | <a href="https://github.com/jumpserver/chen-release/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen-release.svg" /> | JumpServer Web DB 项目,替代原来的 OmniDB |
+| [Kael](https://github.com/jumpserver/kael) | <a href="https://github.com/jumpserver/kael/releases"><img alt="Kael release" src="https://img.shields.io/github/release/jumpserver/kael.svg" /> | JumpServer 连接 GPT 资产的组件项目 |
+| [Wisp](https://github.com/jumpserver/wisp) | <a href="https://github.com/jumpserver/wisp/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/wisp.svg" /> | JumpServer 各系统终端组件和 Core Api 通信的组件项目 |
+| [Clients](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer 客户端 项目 |
+| [Installer](https://github.com/jumpserver/installer) | <a href="https://github.com/jumpserver/installer/releases"><img alt="Installer release" src="https://img.shields.io/github/release/jumpserver/installer.svg" /> | JumpServer 安装包 项目 |

 ## 安全说明

@@ -113,11 +112,6 @@ JumpServer是一款安全产品,请参考 [基本安全建议](https://docs.ju
 - 邮箱:support@fit2cloud.com
 - 电话:400-052-0755

-## 致谢开源

-- [Apache Guacamole](https://guacamole.apache.org/): Web 页面连接 RDP、SSH、VNC 等协议资产,JumpServer Lion 组件使用到该项目;
-- [OmniDB](https://omnidb.org/): Web 页面连接使用数据库,JumpServer Web 数据库组件使用到该项目。

 ## License & Copyright

 Copyright (c) 2014-2023 飞致云 FIT2CLOUD, All rights reserved.

@@ -49,8 +49,7 @@ class AccountTemplateViewSet(OrgBulkModelViewSet):
 @action(methods=['get'], detail=False, url_path='su-from-account-templates')
 def su_from_account_templates(self, request, *args, **kwargs):
 pk = request.query_params.get('template_id')
-template = AccountTemplate.objects.filter(pk=pk).first()
-templates = AccountTemplate.get_su_from_account_templates(template)
+templates = AccountTemplate.get_su_from_account_templates(pk)
 templates = self.filter_queryset(templates)
 serializer = self.get_serializer(templates, many=True)
 return Response(data=serializer.data)

@@ -4,9 +4,58 @@ category: host
 type:
 - AIX
 method: change_secret
+params:
+- name: sudo
+type: str
+label: 'Sudo'
+default: '/bin/whoami'
+help_text: "{{ 'Params sudo help text' | trans }}"

+- name: shell
+type: str
+label: 'Shell'
+default: '/bin/bash'

+- name: home
+type: str
+label: "{{ 'Params home label' | trans }}"
+default: ''
+help_text: "{{ 'Params home help text' | trans }}"

+- name: groups
+type: str
+label: "{{ 'Params groups label' | trans }}"
+default: ''
+help_text: "{{ 'Params groups help text' | trans }}"

 i18n:
 AIX account change secret:
-zh: 使用 Ansible 模块 user 执行账号改密 (DES)
+zh: '使用 Ansible 模块 user 执行账号改密 (DES)'
-ja: Ansible user モジュールを使用してアカウントのパスワード変更 (DES)
+ja: 'Ansible user モジュールを使用してアカウントのパスワード変更 (DES)'
-en: Using Ansible module user to change account secret (DES)
+en: 'Using Ansible module user to change account secret (DES)'

+Params sudo help text:
+zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
+ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
+en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'

+Params home help text:
+zh: '默认家目录 /home/{账号用户名}'
+ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
+en: 'Default home directory /home/{account username}'

+Params groups help text:
+zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
+ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
+en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+Params home label:
+zh: '家目录'
+ja: 'ホームディレクトリ'
+en: 'Home'

+Params groups label:
+zh: '用户组'
+ja: 'グループ'
+en: 'Groups'

@@ -4,6 +4,26 @@
 - name: Test privileged account
 ansible.builtin.ping:

+- name: Check user
+ansible.builtin.user:
+name: "{{ account.username }}"
+shell: "{{ params.shell }}"
+home: "{{ params.home | default('/home/' + account.username, true) }}"
+groups: "{{ params.groups }}"
+expires: -1
+state: present

+- name: "Add {{ account.username }} group"
+ansible.builtin.group:
+name: "{{ account.username }}"
+state: present

+- name: Add user groups
+ansible.builtin.user:
+name: "{{ account.username }}"
+groups: "{{ params.groups }}"
+when: params.groups

 - name: Change password
 ansible.builtin.user:
 name: "{{ account.username }}"

@@ -33,6 +53,16 @@
 exclusive: "{{ ssh_params.exclusive }}"
 when: account.secret_type == "ssh_key"

+- name: Set sudo setting
+ansible.builtin.lineinfile:
+dest: /etc/sudoers
+state: present
+regexp: "^{{ account.username }} ALL="
+line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
+validate: visudo -cf %s
+when:
+- params.sudo

 - name: Refresh connection
 ansible.builtin.meta: reset_connection

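The new `Set sudo setting` task writes a single NOPASSWD rule per account and validates the file with `visudo -cf`. A small Python sketch of the line the task renders, using a hypothetical username and the default value of the new `sudo` parameter:

```python
# Illustration only: mirrors the Jinja expression
# "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}".
username = "appuser"            # hypothetical account name
sudo_commands = "/bin/whoami"   # default of the new `sudo` parameter

line = username + " ALL=(ALL) NOPASSWD: " + sudo_commands
print(line)  # appuser ALL=(ALL) NOPASSWD: /bin/whoami
```
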
@@ -5,9 +5,59 @@ type:
 - unix
 - linux
 method: change_secret
+params:
+- name: sudo
+type: str
+label: 'Sudo'
+default: '/bin/whoami'
+help_text: "{{ 'Params sudo help text' | trans }}"

+- name: shell
+type: str
+label: 'Shell'
+default: '/bin/bash'
+help_text: ''

+- name: home
+type: str
+label: "{{ 'Params home label' | trans }}"
+default: ''
+help_text: "{{ 'Params home help text' | trans }}"

+- name: groups
+type: str
+label: "{{ 'Params groups label' | trans }}"
+default: ''
+help_text: "{{ 'Params groups help text' | trans }}"

 i18n:
 Posix account change secret:
-zh: 使用 Ansible 模块 user 执行账号改密 (SHA512)
+zh: '使用 Ansible 模块 user 执行账号改密 (SHA512)'
-ja: Ansible user モジュールを使用して アカウントのパスワード変更 (SHA512)
+ja: 'Ansible user モジュールを使用して アカウントのパスワード変更 (SHA512)'
-en: Using Ansible module user to change account secret (SHA512)
+en: 'Using Ansible module user to change account secret (SHA512)'

+Params sudo help text:
+zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
+ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
+en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'

+Params home help text:
+zh: '默认家目录 /home/{账号用户名}'
+ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
+en: 'Default home directory /home/{account username}'

+Params groups help text:
+zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
+ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
+en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

+Params home label:
+zh: '家目录'
+ja: 'ホームディレクトリ'
+en: 'Home'

+Params groups label:
+zh: '用户组'
+ja: 'グループ'
+en: 'Groups'

@@ -8,17 +8,13 @@
 # debug:
 # msg: "Username: {{ account.username }}, Password: {{ account.secret }}"

-- name: Get groups of a Windows user
-ansible.windows.win_user:
-name: "{{ jms_account.username }}"
-register: user_info

 - name: Change password
 ansible.windows.win_user:
+fullname: "{{ account.username}}"
 name: "{{ account.username }}"
 password: "{{ account.secret }}"
-groups: "{{ user_info.groups[0].name }}"
+password_never_expires: yes
+groups: "{{ params.groups }}"
 groups_action: add
 update_password: always
 ignore_errors: true

@@ -5,9 +5,22 @@ method: change_secret
 category: host
 type:
 - windows
+params:
+- name: groups
+type: str
+label: '用户组'
+default: 'Users,Remote Desktop Users'
+help_text: "{{ 'Params groups help text' | trans }}"

 i18n:
 Windows account change secret:
-zh: 使用 Ansible 模块 win_user 执行 Windows 账号改密
+zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密'
-ja: Ansible win_user モジュールを使用して Windows アカウントのパスワード変更
+ja: 'Ansible win_user モジュールを使用して Windows アカウントのパスワード変更'
-en: Using Ansible module win_user to change Windows account secret
+en: 'Using Ansible module win_user to change Windows account secret'

+Params groups help text:
+zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
+ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
+en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

@@ -1,11 +1,12 @@
 - hosts: custom
 gather_facts: no
 vars:
+ansible_shell_type: sh
 ansible_connection: local

 tasks:
-- name: Verify account
+- name: Verify account (pyfreerdp)
-ssh_ping:
+rdp_ping:
 login_host: "{{ jms_asset.address }}"
 login_port: "{{ jms_asset.port }}"
 login_user: "{{ account.username }}"

@@ -4,7 +4,7 @@
 ansible_connection: local

 tasks:
-- name: Verify account
+- name: Verify account (paramiko)
 ssh_ping:
 login_host: "{{ jms_asset.address }}"
 login_port: "{{ jms_asset.port }}"

@@ -7,12 +7,14 @@ class SecretType(TextChoices):
 SSH_KEY = 'ssh_key', _('SSH key')
 ACCESS_KEY = 'access_key', _('Access key')
 TOKEN = 'token', _('Token')
+API_KEY = 'api_key', _("API key")


 class AliasAccount(TextChoices):
 ALL = '@ALL', _('All')
 INPUT = '@INPUT', _('Manual input')
 USER = '@USER', _('Dynamic user')
+ANON = '@ANON', _('Anonymous account')


 class Source(TextChoices):

@@ -45,7 +45,7 @@ class AccountFilterSet(BaseFilterSet):

 class Meta:
 model = Account
-fields = ['id', 'asset_id', 'source_id']
+fields = ['id', 'asset_id', 'source_id', 'secret_type']


 class GatheredAccountFilterSet(BaseFilterSet):

@@ -1,12 +1,14 @@
 # Generated by Django 3.2.14 on 2022-12-28 07:29

+import uuid

+import django.db.models.deletion
+import simple_history.models
+from django.conf import settings
+from django.db import migrations, models

 import common.db.encoder
 import common.db.fields
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import simple_history.models
-import uuid


 class Migration(migrations.Migration):

@@ -29,13 +31,16 @@ class Migration(migrations.Migration):
 ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
 ('org_id',
 models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
-('connectivity', models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity')),
+('connectivity',
+models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-',
+max_length=16, verbose_name='Connectivity')),
 ('date_verified', models.DateTimeField(null=True, verbose_name='Date verified')),
 ('name', models.CharField(max_length=128, verbose_name='Name')),
 ('username', models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Username')),
 ('secret_type', models.CharField(
 choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
-('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
+('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
+verbose_name='Secret type')),
 ('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
 ('privileged', models.BooleanField(default=False, verbose_name='Privileged')),
 ('is_active', models.BooleanField(default=True, verbose_name='Is active')),

|
||||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4)),
|
('id', models.UUIDField(db_index=True, default=uuid.uuid4)),
|
||||||
('secret_type', models.CharField(
|
('secret_type', models.CharField(
|
||||||
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
|
||||||
('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
|
('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
|
||||||
|
verbose_name='Secret type')),
|
||||||
('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
|
('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
|
||||||
('version', models.IntegerField(default=0, verbose_name='Version')),
|
('version', models.IntegerField(default=0, verbose_name='Version')),
|
||||||
('history_id', models.AutoField(primary_key=True, serialize=False)),
|
('history_id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
@@ -96,7 +102,8 @@ class Migration(migrations.Migration):
 ('username', models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Username')),
 ('secret_type', models.CharField(
 choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
-('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
+('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
+verbose_name='Secret type')),
 ('secret', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='Secret')),
 ('privileged', models.BooleanField(default=False, verbose_name='Privileged')),
 ('is_active', models.BooleanField(default=True, verbose_name='Is active')),

@@ -1,11 +1,13 @@
 # Generated by Django 3.2.16 on 2022-12-30 08:08

+import uuid

+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models

 import common.db.encoder
 import common.db.fields
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import uuid


 class Migration(migrations.Migration):

@@ -53,7 +55,8 @@ class Migration(migrations.Migration):
 primary_key=True, serialize=False, to='assets.baseautomation')),
 ('secret_type', models.CharField(
 choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
-('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
+('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
+verbose_name='Secret type')),
 ('secret_strategy', models.CharField(choices=[('specific', 'Specific password'),
 ('random_one', 'All assets use the same random password'),
 ('random_all',

@@ -156,7 +159,8 @@ class Migration(migrations.Migration):
 primary_key=True, serialize=False, to='assets.baseautomation')),
 ('secret_type', models.CharField(
 choices=[('password', 'Password'), ('ssh_key', 'SSH key'), ('access_key', 'Access key'),
-('token', 'Token')], default='password', max_length=16, verbose_name='Secret type')),
+('token', 'Token'), ('api_key', 'API key')], default='password', max_length=16,
+verbose_name='Secret type')),
 ('secret_strategy', models.CharField(choices=[('specific', 'Specific password'),
 ('random_one', 'All assets use the same random password'),
 ('random_all',

@@ -1,3 +1,3 @@
-from .base import *
 from .account import *
 from .automations import *
+from .base import *

@@ -88,20 +88,33 @@ class Account(AbsConnectivity, BaseAccount):
 def has_secret(self):
 return bool(self.secret)

+@classmethod
+def get_special_account(cls, name):
+if name == AliasAccount.INPUT.value:
+return cls.get_manual_account()
+elif name == AliasAccount.ANON.value:
+return cls.get_anonymous_account()
+else:
+return cls(name=name, username=name, secret=None)

 @classmethod
 def get_manual_account(cls):
 """ @INPUT 手动登录的账号(any) """
 return cls(name=AliasAccount.INPUT.label, username=AliasAccount.INPUT.value, secret=None)

-@lazyproperty
-def versions(self):
-return self.history.count()
+@classmethod
+def get_anonymous_account(cls):
+return cls(name=AliasAccount.ANON.label, username=AliasAccount.ANON.value, secret=None)

 @classmethod
 def get_user_account(cls):
 """ @USER 动态用户的账号(self) """
 return cls(name=AliasAccount.USER.label, username=AliasAccount.USER.value, secret=None)

+@lazyproperty
+def versions(self):
+return self.history.count()

 def get_su_from_accounts(self):
 """ 排除自己和以自己为 su-from 的账号 """
 return self.asset.accounts.exclude(id=self.id).exclude(su_from=self)

|
||||||
]
|
]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_su_from_account_templates(cls, instance=None):
|
def get_su_from_account_templates(cls, pk=None):
|
||||||
if not instance:
|
if pk is None:
|
||||||
return cls.objects.all()
|
return cls.objects.all()
|
||||||
return cls.objects.exclude(Q(id=instance.id) | Q(su_from=instance))
|
return cls.objects.exclude(Q(id=pk) | Q(su_from_id=pk))
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f'{self.name}({self.username})'
|
||||||
|
|
||||||
def get_su_from_account(self, asset):
|
def get_su_from_account(self, asset):
|
||||||
su_from = self.su_from
|
su_from = self.su_from
|
||||||
|
|
|
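With the helper now keyed by primary key instead of a pre-fetched instance, the view change earlier in this diff can pass the raw `template_id` query parameter straight through. The exclusion it performs can be restated as a standalone sketch; the Q expression matches the diff, the free-function form is only for illustration:

```python
from django.db.models import Q

def su_from_candidates(model, pk=None):
    """Sketch of the reworked get_su_from_account_templates(pk): with no pk
    every template qualifies; with a pk, the template itself and any template
    that already points at it via su_from are excluded."""
    if pk is None:
        return model.objects.all()
    return model.objects.exclude(Q(id=pk) | Q(su_from_id=pk))
```
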
@@ -78,5 +78,8 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
 ]
 extra_kwargs = {
 'spec_info': {'label': _('Spec info')},
-'username': {'help_text': _("Tip: If no username is required for authentication, fill in `null`")}
+'username': {'help_text': _(
+"Tip: If no username is required for authentication, fill in `null`, "
+"If AD account, like `username@domain`"
+)},
 }

@@ -63,15 +63,17 @@ class AutomationExecutionSerializer(serializers.ModelSerializer):

 @staticmethod
 def get_snapshot(obj):
-tp = obj.snapshot['type']
+tp = obj.snapshot.get('type', '')
+type_display = tp if not hasattr(AutomationTypes, tp) \
+else getattr(AutomationTypes, tp).label
 snapshot = {
 'type': tp,
-'name': obj.snapshot['name'],
+'name': obj.snapshot.get('name'),
-'comment': obj.snapshot['comment'],
+'comment': obj.snapshot.get('comment'),
-'accounts': obj.snapshot['accounts'],
+'accounts': obj.snapshot.get('accounts'),
-'node_amount': len(obj.snapshot['nodes']),
+'node_amount': len(obj.snapshot.get('nodes', [])),
-'asset_amount': len(obj.snapshot['assets']),
+'asset_amount': len(obj.snapshot.get('assets', [])),
-'type_display': getattr(AutomationTypes, tp).label,
+'type_display': type_display,
 }
 return snapshot

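The serializer change above replaces direct `snapshot[...]` indexing with `dict.get` plus defaults, so an execution whose snapshot is missing keys still serializes instead of raising `KeyError`. A minimal sketch of the same pattern with a hypothetical, partially filled snapshot:

```python
# Hypothetical snapshot payload missing several keys.
snapshot = {"type": "change_secret", "name": "rotate-prod"}

summary = {
    "type": snapshot.get("type", ""),
    "name": snapshot.get("name"),
    "comment": snapshot.get("comment"),              # None rather than KeyError
    "node_amount": len(snapshot.get("nodes", [])),   # 0 when absent
    "asset_amount": len(snapshot.get("assets", [])),
}
print(summary)
```
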
@@ -50,7 +50,7 @@ class ChangeSecretAutomationSerializer(AuthValidateMixin, BaseAutomationSerializ
 read_only_fields = BaseAutomationSerializer.Meta.read_only_fields
 fields = BaseAutomationSerializer.Meta.fields + read_only_fields + [
 'secret_type', 'secret_strategy', 'secret', 'password_rules',
-'ssh_key_change_strategy', 'passphrase', 'recipients',
+'ssh_key_change_strategy', 'passphrase', 'recipients', 'params'
 ]
 extra_kwargs = {**BaseAutomationSerializer.Meta.extra_kwargs, **{
 'accounts': {'required': True},

@@ -10,7 +10,7 @@ class PushAccountAutomationSerializer(ChangeSecretAutomationSerializer):

 class Meta(ChangeSecretAutomationSerializer.Meta):
 model = PushAccountAutomation
-fields = ['params'] + [
+fields = [
 n for n in ChangeSecretAutomationSerializer.Meta.fields
 if n not in ['recipients']
 ]

@@ -39,7 +39,7 @@ urlpatterns = [

 path('push-account/<uuid:pk>/asset/remove/', api.PushAccountRemoveAssetApi.as_view(),
 name='push-account-remove-asset'),
-path('push-accountt/<uuid:pk>/asset/add/', api.PushAccountAddAssetApi.as_view(), name='push-account-add-asset'),
+path('push-account/<uuid:pk>/asset/add/', api.PushAccountAddAssetApi.as_view(), name='push-account-add-asset'),
 path('push-account/<uuid:pk>/nodes/', api.PushAccountNodeAddRemoveApi.as_view(),
 name='push-account-add-or-remove-node'),
 path('push-account/<uuid:pk>/assets/', api.PushAccountAssetsListApi.as_view(), name='push-account-assets'),

@@ -4,7 +4,7 @@ from rest_framework import serializers
 from accounts.const import (
 SecretType, DEFAULT_PASSWORD_RULES
 )
-from common.utils import gen_key_pair, random_string
+from common.utils import ssh_key_gen, random_string
 from common.utils import validate_ssh_private_key, parse_ssh_private_key_str

@@ -16,7 +16,7 @@ class SecretGenerator:

 @staticmethod
 def generate_ssh_key():
-private_key, public_key = gen_key_pair()
+private_key, public_key = ssh_key_gen()
 return private_key

 def generate_password(self):

@@ -0,0 +1,9 @@
+from django.db import models
+from django.utils.translation import gettext_lazy as _


+class ActionChoices(models.TextChoices):
+reject = 'reject', _('Reject')
+accept = 'accept', _('Accept')
+review = 'review', _('Review')
+warning = 'warning', _('Warning')

@@ -1,5 +1,4 @@
 # Generated by Django 3.2.17 on 2023-06-06 10:57
-from collections import defaultdict

 from django.db import migrations, models

@@ -8,17 +7,20 @@ import common.db.fields

 def migrate_users_login_acls(apps, schema_editor):
 login_acl_model = apps.get_model('acls', 'LoginACL')
-name_used = defaultdict(int)

-for login_acl in login_acl_model.objects.all():
-name = login_acl.name
-if name_used[name] > 0:
-login_acl.name += "_{}".format(name_used[name])
-name_used[name] += 1
+name_used = []
+login_acls = []
+for login_acl in login_acl_model.objects.all().select_related('user'):
+name = '{}_{}'.format(login_acl.name, login_acl.user.username)
+if name.lower() in name_used:
+name += '_{}'.format(str(login_acl.user_id)[:4])
+name_used.append(name.lower())
+login_acl.name = name
 login_acl.users = {
 "type": "ids", "ids": [str(login_acl.user_id)]
 }
-login_acl.save()
+login_acls.append(login_acl)
+login_acl_model.objects.bulk_update(login_acls, ['name', 'users'])


 class Migration(migrations.Migration):

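The rewritten data migration above builds `name_username` labels, de-duplicates them case-insensitively, and persists everything with a single `bulk_update` instead of one save per row. The core de-duplication step, pulled out as a plain-Python sketch in which the objects are stand-ins for LoginACL rows:

```python
def dedupe_acl_names(acls):
    """Append a short user-id suffix when a generated name collides
    case-insensitively, mirroring the migration logic in the diff."""
    seen = []
    for acl in acls:
        name = "{}_{}".format(acl.name, acl.user.username)
        if name.lower() in seen:
            name += "_{}".format(str(acl.user_id)[:4])
        seen.append(name.lower())
        acl.name = name
    return acls
```
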
@@ -7,6 +7,7 @@ from common.db.models import JMSBaseModel
 from common.utils import contains_ip
 from common.utils.time_period import contains_time_period
 from orgs.mixins.models import OrgModelMixin, OrgManager
+from ..const import ActionChoices

 __all__ = [
 'BaseACL', 'UserBaseACL', 'UserAssetAccountBaseACL',

@@ -16,12 +17,6 @@ from orgs.utils import tmp_to_root_org
 from orgs.utils import tmp_to_org


-class ActionChoices(models.TextChoices):
-reject = 'reject', _('Reject')
-accept = 'accept', _('Accept')
-review = 'review', _('Review')


 class BaseACLQuerySet(models.QuerySet):
 def active(self):
 return self.filter(is_active=True)

@@ -1,10 +1,11 @@
 from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers

-from acls.models.base import ActionChoices, BaseACL
+from acls.models.base import BaseACL
 from common.serializers.fields import JSONManyToManyField, LabeledChoiceField
 from jumpserver.utils import has_valid_xpack_license
 from orgs.models import Organization
+from ..const import ActionChoices

 common_help_text = _(
 "With * indicating a match all. "

@@ -60,18 +61,21 @@ class ActionAclSerializer(serializers.Serializer):
 super().__init__(*args, **kwargs)
 self.set_action_choices()

-def set_action_choices(self):
-action = self.fields.get("action")
-if not action:
-return
-choices = action.choices
-if not has_valid_xpack_license():
-choices.pop(ActionChoices.review, None)
-action._choices = choices


-class BaserACLSerializer(ActionAclSerializer, serializers.Serializer):
 class Meta:
+action_choices_exclude = [ActionChoices.warning]

+def set_action_choices(self):
+field_action = self.fields.get("action")
+if not field_action:
+return
+if not has_valid_xpack_license():
+field_action._choices.pop(ActionChoices.review, None)
+for choice in self.Meta.action_choices_exclude:
+field_action._choices.pop(choice, None)


+class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
+class Meta(ActionAclSerializer.Meta):
 model = BaseACL
 fields_mini = ["id", "name"]
 fields_small = fields_mini + [

@@ -84,6 +88,7 @@ class BaserACLSerializer(ActionAclSerializer, serializers.Serializer):
 extra_kwargs = {
 "priority": {"default": 50},
 "is_active": {"default": True},
+'reviewers': {'label': _('Recipients')},
 }

 def validate_reviewers(self, reviewers):

@@ -107,16 +112,16 @@ class BaserACLSerializer(ActionAclSerializer, serializers.Serializer):
 return valid_reviewers


-class BaserUserACLSerializer(BaserACLSerializer):
+class BaseUserACLSerializer(BaseACLSerializer):
 users = JSONManyToManyField(label=_('User'))

-class Meta(BaserACLSerializer.Meta):
+class Meta(BaseACLSerializer.Meta):
-fields = BaserACLSerializer.Meta.fields + ['users']
+fields = BaseACLSerializer.Meta.fields + ['users']


-class BaseUserAssetAccountACLSerializer(BaserUserACLSerializer):
+class BaseUserAssetAccountACLSerializer(BaseUserACLSerializer):
 assets = JSONManyToManyField(label=_('Asset'))
 accounts = serializers.ListField(label=_('Account'))

-class Meta(BaserUserACLSerializer.Meta):
+class Meta(BaseUserACLSerializer.Meta):
-fields = BaserUserACLSerializer.Meta.fields + ['assets', 'accounts']
+fields = BaseUserACLSerializer.Meta.fields + ['assets', 'accounts']

@@ -31,6 +31,8 @@ class CommandFilterACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
 class Meta(BaseSerializer.Meta):
 model = CommandFilterACL
 fields = BaseSerializer.Meta.fields + ['command_groups']
+# 默认都支持所有的 actions
+action_choices_exclude = []


 class CommandReviewSerializer(serializers.Serializer):

@@ -1,6 +1,7 @@
 from orgs.mixins.serializers import BulkOrgResourceModelSerializer
 from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
 from ..models import ConnectMethodACL
+from ..const import ActionChoices

 __all__ = ["ConnectMethodACLSerializer"]

@@ -12,12 +13,6 @@ class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
 i for i in BaseSerializer.Meta.fields + ['connect_methods']
 if i not in ['assets', 'accounts']
 ]
-def __init__(self, *args, **kwargs):
-super().__init__(*args, **kwargs)
-field_action = self.fields.get('action')
-if not field_action:
-return
-# 仅支持拒绝
-for k in ['review', 'accept']:
-field_action._choices.pop(k, None)
+action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
+ActionChoices.review, ActionChoices.accept
+]

@@ -2,7 +2,7 @@ from django.utils.translation import ugettext as _

 from common.serializers import MethodSerializer
 from orgs.mixins.serializers import BulkOrgResourceModelSerializer
-from .base import BaserUserACLSerializer
+from .base import BaseUserACLSerializer
 from .rules import RuleSerializer
 from ..models import LoginACL

@ -11,12 +11,12 @@ __all__ = ["LoginACLSerializer"]
|
||||||
common_help_text = _("With * indicating a match all. ")
|
common_help_text = _("With * indicating a match all. ")
|
||||||
|
|
||||||
|
|
||||||
class LoginACLSerializer(BaserUserACLSerializer, BulkOrgResourceModelSerializer):
|
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
|
||||||
rules = MethodSerializer(label=_('Rule'))
|
rules = MethodSerializer(label=_('Rule'))
|
||||||
|
|
||||||
class Meta(BaserUserACLSerializer.Meta):
|
class Meta(BaseUserACLSerializer.Meta):
|
||||||
model = LoginACL
|
model = LoginACL
|
||||||
fields = BaserUserACLSerializer.Meta.fields + ['rules', ]
|
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
|
||||||
|
|
||||||
def get_rules_serializer(self):
|
def get_rules_serializer(self):
|
||||||
return RuleSerializer()
|
return RuleSerializer()
|
||||||
|
|
|
@ -3,6 +3,7 @@ from .cloud import *
|
||||||
from .custom import *
|
from .custom import *
|
||||||
from .database import *
|
from .database import *
|
||||||
from .device import *
|
from .device import *
|
||||||
|
from .gpt import *
|
||||||
from .host import *
|
from .host import *
|
||||||
from .permission import *
|
from .permission import *
|
||||||
from .web import *
|
from .web import *
|
||||||
|
|
|
@ -82,7 +82,7 @@ class AssetFilterSet(BaseFilterSet):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def filter_protocols(queryset, name, value):
|
def filter_protocols(queryset, name, value):
|
||||||
value = value.split(',')
|
value = value.split(',')
|
||||||
return queryset.filter(protocols__name__in=value)
|
return queryset.filter(protocols__name__in=value).distinct()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def filter_labels(queryset, name, value):
|
def filter_labels(queryset, name, value):
|
||||||
|
@ -91,7 +91,7 @@ class AssetFilterSet(BaseFilterSet):
|
||||||
queryset = queryset.filter(labels__name=n, labels__value=v)
|
queryset = queryset.filter(labels__name=n, labels__value=v)
|
||||||
else:
|
else:
|
||||||
q = Q(labels__name__contains=value) | Q(labels__value__contains=value)
|
q = Q(labels__name__contains=value) | Q(labels__value__contains=value)
|
||||||
queryset = queryset.filter(q)
|
queryset = queryset.filter(q).distinct()
|
||||||
return queryset
|
return queryset
|
||||||
|
|
||||||
|
|
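The two `.distinct()` calls added above guard against duplicate rows: filtering across a many-to-many relation (protocols, labels) joins the asset table against the related table, so one asset can match several related rows. A minimal standalone sketch of that effect, using plain Python lists in place of the hypothetical joined rows (names here are illustrative, not part of the commit):

# Illustrative sketch only: simulate how a join across a many-to-many relation
# can repeat the same parent row, and why the result is de-duplicated.
joined_rows = [
    # (asset_id, protocol_name) pairs produced by a hypothetical join
    (1, 'ssh'), (1, 'sftp'), (2, 'rdp'), (3, 'ssh'),
]
wanted = {'ssh', 'sftp'}

# Without de-duplication, asset 1 appears twice.
matched = [asset_id for asset_id, proto in joined_rows if proto in wanted]
assert matched == [1, 1, 3]

# Keeping each asset once is what .distinct() does at the SQL level.
deduped = list(dict.fromkeys(matched))
assert deduped == [1, 3]
print(deduped)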
@@ -121,6 +121,14 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
         NodeFilterBackend, AttrRulesFilterBackend
     ]

+    def get_queryset(self):
+        queryset = super().get_queryset() \
+            .prefetch_related('nodes', 'protocols') \
+            .select_related('platform', 'domain')
+        if queryset.model is not Asset:
+            queryset = queryset.select_related('asset_ptr')
+        return queryset
+
     def get_serializer_class(self):
         cls = super().get_serializer_class()
         if self.action == "retrieve":

@@ -0,0 +1,16 @@
+from assets.models import GPT, Asset
+from assets.serializers import GPTSerializer
+
+from .asset import AssetViewSet
+
+__all__ = ['GPTViewSet']
+
+
+class GPTViewSet(AssetViewSet):
+    model = GPT
+    perm_model = Asset
+
+    def get_serializer_classes(self):
+        serializer_classes = super().get_serializer_classes()
+        serializer_classes['default'] = GPTSerializer
+        return serializer_classes

@@ -1,11 +1,11 @@
-from rest_framework.mixins import ListModelMixin
 from rest_framework.decorators import action
+from rest_framework.mixins import ListModelMixin
 from rest_framework.response import Response

+from assets.const import AllTypes
+from assets.serializers import CategorySerializer, TypeSerializer
 from common.api import JMSGenericViewSet
 from common.permissions import IsValidUser
-from assets.serializers import CategorySerializer, TypeSerializer
-from assets.const import AllTypes

 __all__ = ['CategoryViewSet']

@@ -32,4 +32,3 @@ class CategoryViewSet(ListModelMixin, JMSGenericViewSet):
         tp = request.query_params.get('type')
         constraints = AllTypes.get_constraints(category, tp)
         return Response(constraints)
-

@@ -26,6 +26,8 @@ class DomainViewSet(OrgBulkModelViewSet):
             return serializers.DomainWithGatewaySerializer
         return serializers.DomainSerializer

+    def get_queryset(self):
+        return super().get_queryset().prefetch_related('assets')

 class GatewayViewSet(HostViewSet):
     perm_model = Gateway

@@ -38,5 +38,6 @@ class LabelViewSet(OrgBulkModelViewSet):
         return super().list(request, *args, **kwargs)

     def get_queryset(self):
-        self.queryset = Label.objects.annotate(asset_count=Count("assets"))
+        self.queryset = Label.objects.prefetch_related(
+            'assets').annotate(asset_count=Count("assets"))
         return self.queryset

@@ -4,20 +4,20 @@ from rest_framework.decorators import action
 from rest_framework.response import Response

 from assets.const import AllTypes
-from assets.models import Platform, Node, Asset
-from assets.serializers import PlatformSerializer
+from assets.models import Platform, Node, Asset, PlatformProtocol
+from assets.serializers import PlatformSerializer, PlatformProtocolSerializer
 from common.api import JMSModelViewSet
 from common.permissions import IsValidUser
 from common.serializers import GroupedChoiceSerializer

-__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi']
+__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']


 class AssetPlatformViewSet(JMSModelViewSet):
     queryset = Platform.objects.all()
     serializer_classes = {
         'default': PlatformSerializer,
-        'categories': GroupedChoiceSerializer
+        'categories': GroupedChoiceSerializer,
     }
     filterset_fields = ['name', 'category', 'type']
     search_fields = ['name']
@@ -25,7 +25,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
         'categories': 'assets.view_platform',
         'type_constraints': 'assets.view_platform',
         'ops_methods': 'assets.view_platform',
-        'filter_nodes_assets': 'assets.view_platform'
+        'filter_nodes_assets': 'assets.view_platform',
     }

     def get_queryset(self):
@@ -61,6 +61,15 @@ class AssetPlatformViewSet(JMSModelViewSet):
         return Response(serializer.data)


+class PlatformProtocolViewSet(JMSModelViewSet):
+    queryset = PlatformProtocol.objects.all()
+    serializer_class = PlatformProtocolSerializer
+    filterset_fields = ['name', 'platform__name']
+    rbac_perms = {
+        '*': 'assets.add_platform'
+    }
+
+
 class PlatformAutomationMethodsApi(generics.ListAPIView):
     permission_classes = (IsValidUser,)

@@ -127,10 +127,13 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
         if not self.instance or not include_assets:
             return Asset.objects.none()
         if query_all:
-            assets = self.instance.get_all_assets_for_tree()
+            assets = self.instance.get_all_assets()
         else:
-            assets = self.instance.get_assets_for_tree()
-        return assets
+            assets = self.instance.get_assets()
+        return assets.only(
+            "id", "name", "address", "platform_id",
+            "org_id", "is_active", 'comment'
+        ).prefetch_related('platform')

     def filter_queryset_for_assets(self, assets):
         search = self.request.query_params.get('search')

@@ -0,0 +1,15 @@
+- hosts: custom
+  gather_facts: no
+  vars:
+    ansible_shell_type: sh
+    ansible_connection: local
+
+  tasks:
+    - name: Test asset connection (pyfreerdp)
+      rdp_ping:
+        login_user: "{{ jms_account.username }}"
+        login_password: "{{ jms_account.secret }}"
+        login_host: "{{ jms_asset.address }}"
+        login_port: "{{ jms_asset.port }}"
+        login_secret_type: "{{ jms_account.secret_type }}"
+        login_private_key_path: "{{ jms_account.private_key_path }}"

@@ -0,0 +1,13 @@
+id: ping_by_rdp
+name: "{{ 'Ping by pyfreerdp' | trans }}"
+category:
+  - device
+  - host
+type:
+  - windows
+method: ping
+i18n:
+  Ping by pyfreerdp:
+    zh: 使用 Python 模块 pyfreerdp 测试主机可连接性
+    en: Ping by pyfreerdp module
+    ja: Pyfreerdpモジュールを使用してホストにPingする

@@ -4,7 +4,7 @@
     ansible_connection: local

   tasks:
-    - name: Test asset connection
+    - name: Test asset connection (paramiko)
       ssh_ping:
         login_user: "{{ jms_account.username }}"
         login_password: "{{ jms_account.secret }}"

@@ -1,7 +1,8 @@
+from django.db import models
 from django.db.models import TextChoices
+from django.utils.translation import gettext_lazy as _

 from jumpserver.utils import has_valid_xpack_license
-from .protocol import Protocol


 class Type:
@@ -28,6 +29,12 @@ class Type:
     )


+class FillType(models.TextChoices):
+    no = 'no', _('Disabled')
+    basic = 'basic', _('Basic')
+    script = 'script', _('Script')
+
+
 class BaseType(TextChoices):
     """
     约束应该考虑代是对平台对限制,避免多余对选项,如: mysql 开启 ssh,
@@ -49,7 +56,7 @@ class BaseType(TextChoices):
         for k, v in cls.get_choices():
             tp_base = {**base_default, **base.get(k, {})}
             tp_auto = {**automation_default, **automation.get(k, {})}
-            tp_protocols = {**protocols_default, **protocols.get(k, {})}
+            tp_protocols = {**protocols_default, **{'port_from_addr': False}, **protocols.get(k, {})}
             tp_protocols = cls._parse_protocols(tp_protocols, k)
             tp_constrains = {**tp_base, 'protocols': tp_protocols, 'automation': tp_auto}
             constrains[k] = tp_constrains
@@ -57,14 +64,20 @@ class BaseType(TextChoices):

     @classmethod
     def _parse_protocols(cls, protocol, tp):
+        from .protocol import Protocol
         settings = Protocol.settings()
         choices = protocol.get('choices', [])
         if choices == '__self__':
             choices = [tp]
-        protocols = [
-            {'name': name, **settings.get(name, {})}
-            for name in choices
-        ]
+        protocols = []
+        for name in choices:
+            protocol = {'name': name, **settings.get(name, {})}
+            setting = protocol.pop('setting', {})
+            setting_values = {k: v.get('default', None) for k, v in setting.items()}
+            protocol['setting'] = setting_values
+            protocols.append(protocol)

         if protocols:
             protocols[0]['default'] = True
         return protocols
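The reworked `_parse_protocols` above no longer copies a protocol's `setting` spec verbatim; it collapses each field spec to its declared `default` value. A small standalone sketch of that collapse, with a hand-written spec standing in for `Protocol.settings()` (the spec content here is only illustrative):

# Illustrative sketch only: collapse a declarative protocol setting spec
# ({field: {'type', 'default', 'label', ...}}) into plain default values,
# mirroring the comprehension used by the new _parse_protocols loop.
spec = {
    'sftp_enabled': {'type': 'bool', 'default': True, 'label': 'SFTP enabled'},
    'sftp_home': {'type': 'str', 'default': '/tmp', 'label': 'SFTP home'},
}

setting_values = {name: field.get('default', None) for name, field in spec.items()}
assert setting_values == {'sftp_enabled': True, 'sftp_home': '/tmp'}
print(setting_values)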
@@ -12,6 +12,7 @@ class Category(ChoicesMixin, models.TextChoices):
     DATABASE = 'database', _("Database")
     CLOUD = 'cloud', _("Cloud service")
     WEB = 'web', _("Web")
+    GPT = 'gpt', "GPT"
     CUSTOM = 'custom', _("Custom type")

     @classmethod

@@ -1,3 +1,6 @@
+from collections import defaultdict
+
+from common.decorators import cached_method
 from .base import BaseType


@@ -9,7 +12,8 @@ class CustomTypes(BaseType):
         except Exception:
             return []
         types = set([p.type for p in platforms])
-        return [(t, t) for t in types]
+        choices = [(t, t) for t in types]
+        return choices

     @classmethod
     def _get_base_constrains(cls) -> dict:
@@ -37,13 +41,20 @@ class CustomTypes(BaseType):
         return constrains

     @classmethod
+    @cached_method(5)
     def _get_protocol_constrains(cls) -> dict:
-        constrains = {}
-        for platform in cls.get_custom_platforms():
-            choices = list(platform.protocols.values_list('name', flat=True))
-            if platform.type in constrains:
-                choices = constrains[platform.type]['choices'] + choices
-            constrains[platform.type] = {'choices': choices}
+        from assets.models import PlatformProtocol
+        _constrains = defaultdict(set)
+        protocols = PlatformProtocol.objects \
+            .filter(platform__category='custom') \
+            .values_list('name', 'platform__type')
+        for name, tp in protocols:
+            _constrains[tp].add(name)
+
+        constrains = {
+            tp: {'choices': list(choices)}
+            for tp, choices in _constrains.items()
+        }
         return constrains

     @classmethod
@@ -51,6 +62,8 @@ class CustomTypes(BaseType):
         return {}

     @classmethod
+    @cached_method(5)
     def get_custom_platforms(cls):
         from assets.models import Platform
-        return Platform.objects.filter(category='custom')
+        platforms = Platform.objects.filter(category='custom')
+        return platforms
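Both classmethods above are now wrapped with `cached_method(5)`, so repeated constraint lookups within a short window skip the database. The real decorator lives in `common.decorators` and may differ in detail; a minimal time-based stand-in that shows the idea:

# Illustrative sketch only: a minimal TTL cache decorator in the spirit of
# cached_method(5). The actual implementation in common.decorators may differ.
import time
from functools import wraps

def cached_method(ttl):
    def decorator(func):
        cache = {}

        @wraps(func)
        def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            hit = cache.get(key)
            if hit and time.time() - hit[0] < ttl:
                return hit[1]
            value = func(*args, **kwargs)
            cache[key] = (time.time(), value)
            return value
        return wrapper
    return decorator

@cached_method(5)
def expensive_lookup(category):
    # Pretend this hits the database.
    return {'category': category, 'computed_at': time.time()}

first = expensive_lookup('custom')
second = expensive_lookup('custom')   # served from cache within 5 seconds
assert first is second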
@@ -0,0 +1,54 @@
+from django.utils.translation import gettext_lazy as _
+
+from .base import BaseType
+
+
+class GPTTypes(BaseType):
+    CHATGPT = 'chatgpt', _('ChatGPT')
+
+    @classmethod
+    def _get_base_constrains(cls) -> dict:
+        return {
+            '*': {
+                'charset_enabled': False,
+                'domain_enabled': False,
+                'su_enabled': False,
+            }
+        }
+
+    @classmethod
+    def _get_automation_constrains(cls) -> dict:
+        constrains = {
+            '*': {
+                'ansible_enabled': False,
+                'ping_enabled': False,
+                'gather_facts_enabled': False,
+                'verify_account_enabled': False,
+                'change_secret_enabled': False,
+                'push_account_enabled': False,
+                'gather_accounts_enabled': False,
+            }
+        }
+        return constrains
+
+    @classmethod
+    def _get_protocol_constrains(cls) -> dict:
+        return {
+            '*': {
+                'choices': '__self__',
+            }
+        }
+
+    @classmethod
+    def internal_platforms(cls):
+        return {
+            cls.CHATGPT: [
+                {'name': 'ChatGPT'}
+            ],
+        }
+
+    @classmethod
+    def get_community_types(cls):
+        return [
+            cls.CHATGPT,
+        ]
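The new `GPTTypes` class declares its constraints under a `'*'` wildcard key; `BaseType` merges that wildcard block with any per-type overrides. A tiny standalone sketch of that merge order (the data here is made up; it only mirrors the `{**defaults, **overrides}` pattern used above):

# Illustrative sketch only: merge a '*' wildcard constraint block with
# per-type overrides, as BaseType does when building type constraints.
automation_constrains = {
    '*': {'ansible_enabled': False, 'ping_enabled': False},
    'chatgpt': {'ping_enabled': True},
}

def constrains_for(tp):
    default = automation_constrains.get('*', {})
    specific = automation_constrains.get(tp, {})
    # Later keys win, so per-type values override the wildcard defaults.
    return {**default, **specific}

assert constrains_for('chatgpt') == {'ansible_enabled': False, 'ping_enabled': True}
assert constrains_for('unknown') == {'ansible_enabled': False, 'ping_enabled': False}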
@@ -1,6 +1,10 @@
+from django.conf import settings
 from django.db import models
+from django.utils.translation import gettext_lazy as _

 from common.db.models import ChoicesMixin
+from common.decorators import cached_method
+from .base import FillType

 __all__ = ['Protocol']

@@ -22,8 +26,9 @@ class Protocol(ChoicesMixin, models.TextChoices):
     mongodb = 'mongodb', 'MongoDB'

     k8s = 'k8s', 'K8S'
-    http = 'http', 'HTTP'
-    _settings = None
+    http = 'http', 'HTTP(s)'
+    chatgpt = 'chatgpt', 'ChatGPT'

     @classmethod
     def device_protocols(cls):
@@ -32,16 +37,41 @@ class Protocol(ChoicesMixin, models.TextChoices):
                 'port': 22,
                 'secret_types': ['password', 'ssh_key'],
                 'setting': {
-                    'sftp_enabled': True,
-                    'sftp_home': '/tmp',
+                    'sftp_enabled': {
+                        'type': 'bool',
+                        'default': True,
+                        'label': _('SFTP enabled')
+                    },
+                    'sftp_home': {
+                        'type': 'str',
+                        'default': '/tmp',
+                        'label': _('SFTP home')
+                    },
                 }
             },
             cls.rdp: {
                 'port': 3389,
                 'secret_types': ['password'],
                 'setting': {
-                    'console': False,
-                    'security': 'any',
+                    'console': {
+                        'type': 'bool',
+                        'default': False,
+                        'label': _('Console'),
+                        'help_text': _("Connect to console session")
+                    },
+                    'security': {
+                        'type': 'choice',
+                        'choices': [('any', _('Any')), ('rdp', 'RDP'), ('tls', 'TLS'), ('nla', 'NLA')],
+                        'default': 'any',
+                        'label': _('Security'),
+                        'help_text': _("Security layer to use for the connection")
+                    },
+                    'ad_domain': {
+                        'type': 'str',
+                        'required': False,
+                        'default': '',
+                        'label': _('AD domain')
+                    }
                 }
             },
             cls.vnc: {
@@ -56,7 +86,11 @@ class Protocol(ChoicesMixin, models.TextChoices):
                 'port': 5985,
                 'secret_types': ['password'],
                 'setting': {
-                    'use_ssl': False,
+                    'use_ssl': {
+                        'type': 'bool',
+                        'default': False,
+                        'label': _('Use SSL')
+                    },
                 }
             },
         }
@@ -79,21 +113,25 @@ class Protocol(ChoicesMixin, models.TextChoices):
                 'port': 5432,
                 'required': True,
                 'secret_types': ['password'],
+                'xpack': True
             },
             cls.oracle: {
                 'port': 1521,
                 'required': True,
                 'secret_types': ['password'],
+                'xpack': True
             },
             cls.sqlserver: {
                 'port': 1433,
                 'required': True,
                 'secret_types': ['password'],
+                'xpack': True,
             },
             cls.clickhouse: {
                 'port': 9000,
                 'required': True,
                 'secret_types': ['password'],
+                'xpack': True,
             },
             cls.mongodb: {
                 'port': 27017,
@@ -105,7 +143,11 @@ class Protocol(ChoicesMixin, models.TextChoices):
                 'required': True,
                 'secret_types': ['password'],
                 'setting': {
-                    'auth_username': True,
+                    'auth_username': {
+                        'type': 'bool',
+                        'default': False,
+                        'label': _('Auth username')
+                    },
                 }
             },
         }
@@ -115,32 +157,97 @@ class Protocol(ChoicesMixin, models.TextChoices):
         return {
             cls.k8s: {
                 'port': 443,
+                'port_from_addr': True,
                 'required': True,
                 'secret_types': ['token'],
             },
             cls.http: {
                 'port': 80,
+                'port_from_addr': True,
                 'secret_types': ['password'],
                 'setting': {
-                    'username_selector': 'name=username',
-                    'password_selector': 'name=password',
-                    'submit_selector': 'id=login_button',
+                    'autofill': {
+                        'label': _('Autofill'),
+                        'type': 'choice',
+                        'choices': FillType.choices,
+                        'default': 'basic',
+                    },
+                    'username_selector': {
+                        'type': 'str',
+                        'default': 'name=username',
+                        'label': _('Username selector')
+                    },
+                    'password_selector': {
+                        'type': 'str',
+                        'default': 'name=password',
+                        'label': _('Password selector')
+                    },
+                    'submit_selector': {
+                        'type': 'str',
+                        'default': 'type=submit',
+                        'label': _('Submit selector')
+                    },
+                    'script': {
+                        'type': 'text',
+                        'default': [],
+                        'label': _('Script'),
+                    }
                 }
             },
         }

     @classmethod
+    def gpt_protocols(cls):
+        protocols = {
+            cls.chatgpt: {
+                'port': 443,
+                'required': True,
+                'port_from_addr': True,
+                'secret_types': ['api_key'],
+                'setting': {
+                    'api_mode': {
+                        'type': 'choice',
+                        'default': 'gpt-3.5-turbo',
+                        'label': _('API mode'),
+                        'choices': [
+                            ('gpt-3.5-turbo', 'GPT-3.5 Turbo'),
+                            ('gpt-3.5-turbo-16k', 'GPT-3.5 Turbo 16K'),
+                        ]
+                    }
+                }
+            }
+        }
+        if settings.XPACK_ENABLED:
+            choices = protocols[cls.chatgpt]['setting']['api_mode']['choices']
+            choices.extend([
+                ('gpt-4', 'GPT-4'),
+                ('gpt-4-32k', 'GPT-4 32K'),
+            ])
+        return protocols
+
+    @classmethod
+    @cached_method(ttl=600)
     def settings(cls):
         return {
             **cls.device_protocols(),
             **cls.database_protocols(),
-            **cls.cloud_protocols()
+            **cls.cloud_protocols(),
+            **cls.gpt_protocols(),
         }

+    @classmethod
+    @cached_method(ttl=600)
+    def xpack_protocols(cls):
+        return [
+            protocol
+            for protocol, config in cls.settings().items()
+            if config.get('xpack', False)
+        ]
+
     @classmethod
     def protocol_secret_types(cls):
-        settings = cls.settings()
+        configs = cls.settings()
         return {
-            protocol: settings[protocol]['secret_types'] or ['password']
-            for protocol in cls.settings()
+            protocol: configs[protocol]['secret_types'] or ['password']
+            for protocol in configs
        }
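Each protocol `setting` entry above is now a small declarative field spec (`type`, `default`, `label`, optional `choices`). A minimal standalone sketch of checking one submitted value against that kind of spec; JumpServer builds real serializer fields from these specs, so this stand-in only shows the shape of the data, not the project's actual validation code:

# Illustrative sketch only: validate one setting value against a declarative
# field spec of the kind the protocol settings above now carry.
API_MODE_SPEC = {
    'type': 'choice',
    'default': 'gpt-3.5-turbo',
    'choices': [('gpt-3.5-turbo', 'GPT-3.5 Turbo'), ('gpt-3.5-turbo-16k', 'GPT-3.5 Turbo 16K')],
}

def clean_setting(value, spec):
    # Fall back to the declared default when nothing was submitted.
    if value is None:
        return spec.get('default')
    if spec.get('type') == 'choice':
        allowed = {choice for choice, _label in spec.get('choices', [])}
        if value not in allowed:
            raise ValueError('%r is not one of %s' % (value, sorted(allowed)))
    return value

assert clean_setting(None, API_MODE_SPEC) == 'gpt-3.5-turbo'
assert clean_setting('gpt-3.5-turbo-16k', API_MODE_SPEC) == 'gpt-3.5-turbo-16k'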
@@ -10,6 +10,7 @@ from .cloud import CloudTypes
 from .custom import CustomTypes
 from .database import DatabaseTypes
 from .device import DeviceTypes
+from .gpt import GPTTypes
 from .host import HostTypes
 from .web import WebTypes

@@ -18,7 +19,7 @@ class AllTypes(ChoicesMixin):
     choices: list
     includes = [
         HostTypes, DeviceTypes, DatabaseTypes,
-        CloudTypes, WebTypes, CustomTypes
+        CloudTypes, WebTypes, CustomTypes, GPTTypes
     ]
     _category_constrains = {}

@@ -147,6 +148,7 @@ class AllTypes(ChoicesMixin):
         (Category.DATABASE, DatabaseTypes),
         (Category.CLOUD, CloudTypes),
         (Category.WEB, WebTypes),
+        (Category.GPT, GPTTypes),
         (Category.CUSTOM, CustomTypes),
     )

@@ -193,7 +195,6 @@ class AllTypes(ChoicesMixin):
         }
         return node

-
     @classmethod
     def asset_to_node(cls, asset, pid):
         node = {
@@ -351,7 +352,7 @@ class AllTypes(ChoicesMixin):

         for d in platform_datas:
             name = d['name']
-            # print("\t - Platform: {}".format(name))
+            print("\t - Platform: {}".format(name))
             _automation = d.pop('automation', {})
             _protocols = d.pop('_protocols', [])
             _protocols_setting = d.pop('protocols_setting', {})
@@ -364,7 +365,7 @@ class AllTypes(ChoicesMixin):
             setting = _protocols_setting.get(p['name'], {})
             p['required'] = setting.pop('required', False)
             p['default'] = setting.pop('default', False)
-            p['setting'] = {**p.get('setting', {}), **setting}
+            p['setting'] = {**p.get('setting', {}).get('default', ''), **setting}

             platform_data = {
                 **default_platform_data, **d,

@@ -1,4 +1,3 @@
-from django.db import models
 from django.utils.translation import gettext_lazy as _

 from .base import BaseType
@@ -53,9 +52,3 @@ class WebTypes(BaseType):
         return [
             cls.WEBSITE,
         ]
-
-
-class FillType(models.TextChoices):
-    no = 'no', _('Disabled')
-    basic = 'basic', _('Basic')
-    script = 'script', _('Script')

@@ -2,6 +2,7 @@

 import django.db
 from django.db import migrations, models

 import common.db.fields


@@ -118,7 +119,7 @@ class Migration(migrations.Migration):
                      primary_key=True, serialize=False, to='assets.asset')),
         ],
         options={
             'verbose_name': 'Host',
         },
         ),
         migrations.CreateModel(

@@ -137,6 +137,25 @@ def migrate_to_nodes(apps, *args):
         parent.save()


+def migrate_ori_host_to_devices(apps, *args):
+    device_model = apps.get_model('assets', 'Device')
+    asset_model = apps.get_model('assets', 'Asset')
+    host_model = apps.get_model('assets', 'Host')
+    hosts_need_migrate_to_device = host_model.objects.filter(asset_ptr__platform__category='device')
+    assets = asset_model.objects.filter(id__in=hosts_need_migrate_to_device.values_list('asset_ptr_id', flat=True))
+    assets_map = {asset.id: asset for asset in assets}
+
+    print("\t- Migrate ori host to device: ", len(hosts_need_migrate_to_device))
+    for host in hosts_need_migrate_to_device:
+        asset = assets_map.get(host.asset_ptr_id)
+        if not asset:
+            continue
+        device = device_model(asset_ptr_id=asset.id)
+        device.__dict__.update(asset.__dict__)
+        device.save()
+        host.delete(keep_parents=True)
+
+
 class Migration(migrations.Migration):
     dependencies = [
         ('assets', '0097_auto_20220426_1558'),
@@ -146,5 +165,6 @@ class Migration(migrations.Migration):
     operations = [
         migrations.RunPython(migrate_database_to_asset),
         migrations.RunPython(migrate_cloud_to_asset),
-        migrations.RunPython(migrate_to_nodes)
+        migrations.RunPython(migrate_to_nodes),
+        migrations.RunPython(migrate_ori_host_to_devices),
     ]
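The data migration added above moves mis-categorised Host rows onto the Device child table by copying every instance attribute in one shot and then deleting the old child row while keeping the shared parent row. Django's multi-table inheritance and `delete(keep_parents=True)` cannot be reproduced standalone; the plain-object sketch below only illustrates the attribute-copy step (all names are hypothetical):

# Illustrative sketch only: the __dict__.update copy used by
# migrate_ori_host_to_devices, shown with plain objects instead of model rows.
class ChildRow:
    def __init__(self, asset_ptr_id=None):
        self.asset_ptr_id = asset_ptr_id

old_host = ChildRow(asset_ptr_id=42)
old_host.extra_note = 'collected from the asset row'

new_device = ChildRow(asset_ptr_id=old_host.asset_ptr_id)
# Copy every instance attribute at once, as device.__dict__.update(asset.__dict__) does.
new_device.__dict__.update(old_host.__dict__)

assert new_device.asset_ptr_id == 42
assert new_device.extra_note == 'collected from the asset row'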
@@ -2,16 +2,13 @@

 from django.db import migrations, models

-from assets.const import AllTypes
-

 def migrate_automation_push_account_params(apps, schema_editor):
     platform_automation_model = apps.get_model('assets', 'PlatformAutomation')
-    platform_automation_methods = AllTypes.get_automation_methods()
     methods_id_data_map = {
-        i['id']: None if i['params_serializer'] is None else i['params_serializer']({}).data
-        for i in platform_automation_methods
-        if i['method'] == 'push_account'
+        'push_account_aix': {'sudo': '/bin/whoami', 'shell': '/bin/bash', 'home': '', 'groups': ''},
+        'push_account_posix': {'sudo': '/bin/whoami', 'shell': '/bin/bash', 'home': '', 'groups': ''},
+        'push_account_local_windows': {'groups': 'Users,Remote Desktop Users'},
     }
     automation_objs = []
     for automation in platform_automation_model.objects.all():

@@ -0,0 +1,39 @@
+# Generated by Django 3.2.19 on 2023-06-30 08:13
+
+import django.db.models.deletion
+from django.db import migrations, models
+
+
+def add_chatgpt_platform(apps, schema_editor):
+    platform_cls = apps.get_model('assets', 'Platform')
+    automation_cls = apps.get_model('assets', 'PlatformAutomation')
+    platform = platform_cls.objects.create(
+        name='ChatGPT', internal=True, category='gpt', type='chatgpt',
+        domain_enabled=False, su_enabled=False, comment='ChatGPT',
+        created_by='System', updated_by='System',
+    )
+    platform.protocols.create(name='chatgpt', port=443, primary=True, setting={'api_mode': 'gpt-3.5-turbo'})
+    automation_cls.objects.create(ansible_enabled=False, platform=platform)
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('assets', '0119_assets_add_default_node'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='GPT',
+            fields=[
+                ('asset_ptr',
+                 models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
+                                      primary_key=True, serialize=False, to='assets.asset')),
+                ('proxy', models.CharField(blank=True, default='', max_length=128, verbose_name='Proxy')),
+            ],
+            options={
+                'verbose_name': 'Web',
+            },
+            bases=('assets.asset',),
+        ),
+        migrations.RunPython(add_chatgpt_platform)
+    ]

@@ -3,5 +3,6 @@ from .common import *
 from .custom import *
 from .database import *
 from .device import *
+from .gpt import *
 from .host import *
 from .web import *

@@ -206,15 +206,14 @@ class Asset(NodesRelationMixin, AbsConnectivity, JSONFilterMixin, JMSOrgBaseMode
     @lazyproperty
     def auto_config(self):
         platform = self.platform
-        automation = self.platform.automation
         auto_config = {
             'su_enabled': platform.su_enabled,
             'domain_enabled': platform.domain_enabled,
             'ansible_enabled': False
         }
+        automation = getattr(self.platform, 'automation', None)
         if not automation:
             return auto_config

         auto_config.update(model_to_dict(automation))
         return auto_config

@@ -0,0 +1,11 @@
+from django.db import models
+from django.utils.translation import gettext_lazy as _
+
+from .common import Asset
+
+
+class GPT(Asset):
+    proxy = models.CharField(max_length=128, blank=True, default='', verbose_name=_("Proxy"))
+
+    class Meta:
+        verbose_name = _("Web")

@@ -1,7 +1,7 @@
 from django.db import models
 from django.utils.translation import gettext_lazy as _

-from assets.const.web import FillType
+from assets.const import FillType
 from .common import Asset


@@ -7,12 +7,9 @@ from __future__ import unicode_literals
 import uuid

 from django.db import models
-import logging
 from django.utils.translation import ugettext_lazy as _


 __all__ = ['AssetGroup']
-logger = logging.getLogger(__name__)
-

 class AssetGroup(models.Model):

@@ -429,18 +429,6 @@ class NodeAssetsMixin(NodeAllAssetsMappingMixin):
         assets = Asset.objects.filter(nodes=self)
         return assets.distinct()

-    def get_assets_for_tree(self):
-        return self.get_assets().only(
-            "id", "name", "address", "platform_id",
-            "org_id", "is_active"
-        ).prefetch_related('platform')
-
-    def get_all_assets_for_tree(self):
-        return self.get_all_assets().only(
-            "id", "name", "address", "platform_id",
-            "org_id", "is_active"
-        ).prefetch_related('platform')
-
     def get_valid_assets(self):
         return self.get_assets().valid()

@@ -8,6 +8,8 @@ from common.db.models import JMSBaseModel

 __all__ = ['Platform', 'PlatformProtocol', 'PlatformAutomation']

+from common.utils import lazyproperty
+

 class PlatformProtocol(models.Model):
     name = models.CharField(max_length=32, verbose_name=_('Name'))
@@ -26,6 +28,11 @@ class PlatformProtocol(models.Model):
     def secret_types(self):
         return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])

+    @lazyproperty
+    def port_from_addr(self):
+        from assets.const.protocol import Protocol as ProtocolConst
+        return ProtocolConst.settings().get(self.name, {}).get('port_from_addr', False)
+

 class PlatformAutomation(models.Model):
     ansible_enabled = models.BooleanField(default=False, verbose_name=_("Enabled"))
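The new `port_from_addr` property above is wrapped in `lazyproperty`, so the protocol settings lookup runs once per instance and the result is reused. The real helper lives in `common.utils` and may differ; a minimal compute-once descriptor that shows the idea (all class names below are made up):

# Illustrative sketch only: a compute-once property in the spirit of lazyproperty.
class lazyproperty:
    def __init__(self, func):
        self.func = func

    def __get__(self, instance, owner=None):
        if instance is None:
            return self
        value = self.func(instance)
        # Store the result on the instance so the descriptor is only consulted once.
        instance.__dict__[self.func.__name__] = value
        return value


class FakeProtocol:
    calls = 0

    @lazyproperty
    def port_from_addr(self):
        FakeProtocol.calls += 1
        return False


p = FakeProtocol()
assert p.port_from_addr is False
assert p.port_from_addr is False
assert FakeProtocol.calls == 1   # computed only once per instance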
@@ -4,5 +4,6 @@ from .common import *
 from .custom import *
 from .database import *
 from .device import *
+from .gpt import *
 from .host import *
 from .web import *

@@ -124,6 +124,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
     protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
     accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Account'))
     nodes_display = serializers.ListField(read_only=False, required=False, label=_("Node path"))
+    _accounts = None

     class Meta:
         model = Asset
@@ -151,6 +152,13 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._init_field_choices()
+        self._extract_accounts()
+
+    def _extract_accounts(self):
+        if not getattr(self, 'initial_data', None):
+            return
+        accounts = self.initial_data.pop('accounts', None)
+        self._accounts = accounts

     def _get_protocols_required_default(self):
         platform = self._asset_platform
@@ -167,10 +175,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
             return

         protocols_required, protocols_default = self._get_protocols_required_default()
-        protocols_data = [
-            {'name': p.name, 'port': p.port}
-            for p in protocols_required + protocols_default
-        ]
+        protocol_map = {str(protocol.id): protocol for protocol in protocols_required + protocols_default}
+        protocols = list(protocol_map.values())
+        protocols_data = [{'name': p.name, 'port': p.port} for p in protocols]
         self.initial_data['protocols'] = protocols_data

     def _init_field_choices(self):
@@ -263,7 +270,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
                 error = p.get('name') + ': ' + _("port out of range (0-65535)")
                 raise serializers.ValidationError(error)

-        protocols_required, protocols_default = self._get_protocols_required_default()
+        protocols_required, __ = self._get_protocols_required_default()
         protocols_not_found = [p.name for p in protocols_required if p.name not in protocols_data_map]
         if protocols_not_found:
             raise serializers.ValidationError({
@@ -277,7 +284,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
             return
         for data in accounts_data:
             data['asset'] = asset.id
-
         s = AssetAccountSerializer(data=accounts_data, many=True)
         s.is_valid(raise_exception=True)
         s.save()
@@ -285,16 +291,13 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
     @atomic
     def create(self, validated_data):
         nodes_display = validated_data.pop('nodes_display', '')
-        accounts = validated_data.pop('accounts', [])
         instance = super().create(validated_data)
-        self.accounts_create(accounts, instance)
+        self.accounts_create(self._accounts, instance)
         self.perform_nodes_display_create(instance, nodes_display)
         return instance

     @atomic
     def update(self, instance, validated_data):
-        if not validated_data.get('accounts'):
-            validated_data.pop('accounts', None)
         nodes_display = validated_data.pop('nodes_display', '')
         instance = super().update(instance, validated_data)
         self.perform_nodes_display_create(instance, nodes_display)
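The `_extract_accounts` change above pops the nested, write-only account data out of the raw payload before field validation, stashes it on the serializer, and re-attaches it only after the asset has been created. DRF is not imported in the sketch below; a tiny stand-in class (all names hypothetical) only illustrates that stash-then-reuse flow:

# Illustrative sketch only: pop nested payload data before validation,
# keep it aside, and re-attach it once the main object exists.
class AssetPayloadHandler:
    def __init__(self, initial_data=None):
        self.initial_data = initial_data
        self._accounts = None
        self._extract_accounts()

    def _extract_accounts(self):
        if not self.initial_data:
            return
        # Remove nested account data so the main validation step never sees it.
        self._accounts = self.initial_data.pop('accounts', None)

    def create(self):
        asset = {'name': self.initial_data['name']}
        # Re-attach the stashed accounts after the asset exists.
        asset['accounts'] = self._accounts or []
        return asset


handler = AssetPayloadHandler({'name': 'web-01', 'accounts': [{'username': 'root'}]})
asset = handler.create()
assert 'accounts' not in handler.initial_data
assert asset['accounts'] == [{'username': 'root'}]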
@@ -0,0 +1,33 @@
+from django.utils.translation import gettext_lazy as _
+from rest_framework import serializers
+
+from assets.models import GPT
+from .common import AssetSerializer
+
+__all__ = ['GPTSerializer']
+
+
+class GPTSerializer(AssetSerializer):
+    class Meta(AssetSerializer.Meta):
+        model = GPT
+        fields = AssetSerializer.Meta.fields + [
+            'proxy',
+        ]
+        extra_kwargs = {
+            **AssetSerializer.Meta.extra_kwargs,
+            'proxy': {
+                'help_text': _(
+                    'If the server cannot directly connect to the API address, '
+                    'you need set up an HTTP proxy. '
+                    'e.g. http(s)://host:port'
+                ),
+                'label': _('HTTP proxy')}
+        }
+
+    @staticmethod
+    def validate_proxy(value):
+        if value and not value.startswith(("http://", "https://")):
+            raise serializers.ValidationError(
+                _('Proxy must start with http:// or https://')
+            )
+        return value
@@ -1,7 +1,7 @@
 from django.utils.translation import gettext_lazy as _
 from rest_framework import serializers

-from assets.const.web import FillType
+from assets.const import FillType
 from assets.models import Database, Web
 from common.serializers.fields import LabeledChoiceField

@@ -14,6 +14,7 @@ class DatabaseSpecSerializer(serializers.ModelSerializer):

 class WebSpecSerializer(serializers.ModelSerializer):
     autofill = LabeledChoiceField(choices=FillType.choices, label=_('Autofill'))
+
     class Meta:
         model = Web
         fields = [

@@ -51,14 +51,14 @@ class AutomationExecutionSerializer(serializers.ModelSerializer):
         from assets.const import AutomationTypes as AssetTypes
         from accounts.const import AutomationTypes as AccountTypes
         tp_dict = dict(AssetTypes.choices) | dict(AccountTypes.choices)
-        tp = obj.snapshot['type']
+        tp = obj.snapshot.get('type', '')
         snapshot = {
             'type': {'value': tp, 'label': tp_dict.get(tp, tp)},
-            'name': obj.snapshot['name'],
-            'comment': obj.snapshot['comment'],
-            'accounts': obj.snapshot['accounts'],
-            'node_amount': len(obj.snapshot['nodes']),
-            'asset_amount': len(obj.snapshot['assets']),
+            'name': obj.snapshot.get('name'),
+            'comment': obj.snapshot.get('comment'),
+            'accounts': obj.snapshot.get('accounts'),
+            'node_amount': len(obj.snapshot.get('nodes', [])),
+            'asset_amount': len(obj.snapshot.get('assets', [])),
         }
         return snapshot

@@ -1,48 +1,17 @@
+from django.db.models import QuerySet
 from django.utils.translation import gettext_lazy as _
 from rest_framework import serializers

-from assets.const.web import FillType
-from common.serializers import WritableNestedModelSerializer, type_field_map
+from common.serializers import (
+    WritableNestedModelSerializer, type_field_map, MethodSerializer,
+    DictSerializer, create_serializer_class
+)
 from common.serializers.fields import LabeledChoiceField
 from common.utils import lazyproperty
-from ..const import Category, AllTypes
+from ..const import Category, AllTypes, Protocol
 from ..models import Platform, PlatformProtocol, PlatformAutomation

-__all__ = ["PlatformSerializer", "PlatformOpsMethodSerializer"]
+__all__ = ["PlatformSerializer", "PlatformOpsMethodSerializer", "PlatformProtocolSerializer"]


-class ProtocolSettingSerializer(serializers.Serializer):
-    SECURITY_CHOICES = [
-        ("any", "Any"),
-        ("rdp", "RDP"),
-        ("tls", "TLS"),
-        ("nla", "NLA"),
-    ]
-    # RDP
-    console = serializers.BooleanField(required=False, default=False)
-    security = serializers.ChoiceField(choices=SECURITY_CHOICES, default="any")
-
-    # SFTP
-    sftp_enabled = serializers.BooleanField(default=True, label=_("SFTP enabled"))
-    sftp_home = serializers.CharField(default="/tmp", label=_("SFTP home"))
-
-    # HTTP
-    autofill = serializers.ChoiceField(default='basic', choices=FillType.choices, label=_("Autofill"))
-    username_selector = serializers.CharField(
-        default="", allow_blank=True, label=_("Username selector")
-    )
-    password_selector = serializers.CharField(
-        default="", allow_blank=True, label=_("Password selector")
-    )
-    submit_selector = serializers.CharField(
-        default="", allow_blank=True, label=_("Submit selector")
-    )
-    script = serializers.JSONField(default=list, label=_("Script"))
-    # Redis
-    auth_username = serializers.BooleanField(default=False, label=_("Auth with username"))
-
-    # WinRM
-    use_ssl = serializers.BooleanField(default=False, label=_("Use SSL"))
-
-
 class PlatformAutomationSerializer(serializers.ModelSerializer):
@@ -76,15 +45,57 @@ class PlatformAutomationSerializer(serializers.ModelSerializer):


 class PlatformProtocolSerializer(serializers.ModelSerializer):
-    setting = ProtocolSettingSerializer(required=False, allow_null=True)
+    setting = MethodSerializer(required=False, label=_("Setting"))
+    port_from_addr = serializers.BooleanField(label=_("Port from addr"), read_only=True)

     class Meta:
         model = PlatformProtocol
         fields = [
-            "id", "name", "port", "primary",
-            "required", "default", "public",
+            "id", "name", "port", "port_from_addr",
+            "primary", "required", "default", "public",
             "secret_types", "setting",
         ]
+        extra_kwargs = {
+            "primary": {
+                "help_text": _(
+                    "This protocol is primary, and it must be set when adding assets. "
+                    "Additionally, there can only be one primary protocol."
+                )
+            },
+            "required": {
+                "help_text": _("This protocol is required, and it must be set when adding assets.")
+            },
+            "default": {
+                "help_text": _("This protocol is default, when adding assets, it will be displayed by default.")
+            },
+            "public": {
+                "help_text": _("This protocol is public, asset will show this protocol to user")
+            },
+        }
+
+    def get_setting_serializer(self):
+        request = self.context.get('request')
+        default_field = DictSerializer(required=False)
+
+        if not request:
+            return default_field
+
+        if self.instance and isinstance(self.instance, (QuerySet, list)):
+            instance = self.instance[0]
+        else:
+            instance = self.instance
+
+        protocol = request.query_params.get('name', '')
+        if instance and not protocol:
+            protocol = instance.name
+
+        protocol_settings = Protocol.settings()
+        setting_fields = protocol_settings.get(protocol, {}).get('setting')
+        if not setting_fields:
+            return default_field
+        setting_fields = [{'name': k, **v} for k, v in setting_fields.items()]
+        name = '{}ProtocolSettingSerializer'.format(protocol.capitalize())
+        return create_serializer_class(name, setting_fields)()

     def to_file_representation(self, data):
         return '{name}/{port}'.format(**data)
@@ -144,6 +155,18 @@ class PlatformSerializer(WritableNestedModelSerializer):
         "domain_default": {"label": _('Default Domain')},
     }

+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.set_initial_value()
+
+    def set_initial_value(self):
+        if not hasattr(self, 'initial_data'):
+            return
+        if self.instance:
+            return
+        if not self.initial_data.get('automation'):
+            self.initial_data['automation'] = {}
+
     @property
     def platform_category_type(self):
         if self.instance:
@@ -189,8 +212,9 @@ class PlatformSerializer(WritableNestedModelSerializer):

     def validate_automation(self, automation):
         automation = automation or {}
-        automation = automation.get('ansible_enabled', False) \
+        ansible_enabled = automation.get('ansible_enabled', False) \
             and self.constraints['automation'].get('ansible_enabled', False)
+        automation['ansible_enable'] = ansible_enabled
         return automation
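`get_setting_serializer` above hands the declarative `setting` field specs to `create_serializer_class`, which builds a serializer class at runtime. That helper lives in `common.serializers` and constructs real DRF fields; the pure-Python sketch below only illustrates the general idea of assembling a class from a list of field specs with `type()` (field names and the helper shown here are assumptions, not the project's code):

# Illustrative sketch only: build a simple settings class from field specs.
setting_fields = [
    {'name': 'sftp_enabled', 'type': 'bool', 'default': True},
    {'name': 'sftp_home', 'type': 'str', 'default': '/tmp'},
]

def create_settings_class(name, fields):
    # Attach each spec's default as a class attribute on a dynamically built type.
    attrs = {spec['name']: spec.get('default') for spec in fields}
    return type(name, (object,), attrs)

SshProtocolSetting = create_settings_class('SshProtocolSetting', setting_fields)
setting = SshProtocolSetting()
assert setting.sftp_enabled is True
assert setting.sftp_home == '/tmp'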
|
|
@@ -14,6 +14,7 @@ router.register(r'devices', api.DeviceViewSet, 'device')
 router.register(r'databases', api.DatabaseViewSet, 'database')
 router.register(r'webs', api.WebViewSet, 'web')
 router.register(r'clouds', api.CloudViewSet, 'cloud')
+router.register(r'gpts', api.GPTViewSet, 'gpt')
 router.register(r'customs', api.CustomViewSet, 'custom')
 router.register(r'platforms', api.AssetPlatformViewSet, 'platform')
 router.register(r'labels', api.LabelViewSet, 'label')

@@ -21,6 +22,7 @@ router.register(r'nodes', api.NodeViewSet, 'node')
 router.register(r'domains', api.DomainViewSet, 'domain')
 router.register(r'gateways', api.GatewayViewSet, 'gateway')
 router.register(r'favorite-assets', api.FavoriteAssetViewSet, 'favorite-asset')
+router.register(r'protocol-settings', api.PlatformProtocolViewSet, 'protocol-setting')

 urlpatterns = [
     # path('assets/<uuid:pk>/gateways/', api.AssetGatewayListApi.as_view(), name='asset-gateway-list'),

@@ -46,7 +48,8 @@ urlpatterns = [
     path('nodes/<uuid:pk>/tasks/', api.NodeTaskCreateApi.as_view(), name='node-task-create'),

     path('gateways/<uuid:pk>/test-connective/', api.GatewayTestConnectionApi.as_view(), name='test-gateway-connective'),
-    path('platform-automation-methods/', api.PlatformAutomationMethodsApi.as_view(), name='platform-automation-methods'),
+    path('platform-automation-methods/', api.PlatformAutomationMethodsApi.as_view(),
+         name='platform-automation-methods'),
 ]

 urlpatterns += router.urls

@@ -42,7 +42,7 @@ def _get_instance_field_value(
         if getattr(f, 'attname', None) in model_need_continue_fields:
             continue

-        value = getattr(instance, f.name) or getattr(instance, f.attname)
+        value = getattr(instance, f.name, None) or getattr(instance, f.attname, None)
         if not isinstance(value, bool) and not value:
             continue


@@ -8,12 +8,13 @@ from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
 from django.utils import timezone
 from django.utils.translation import ugettext_lazy as _
-from rest_framework import status
+from rest_framework import status, serializers
 from rest_framework.decorators import action
-from rest_framework.exceptions import PermissionDenied
+from rest_framework.exceptions import PermissionDenied, ValidationError
 from rest_framework.request import Request
 from rest_framework.response import Response

+from accounts.const import AliasAccount
 from common.api import JMSModelViewSet
 from common.exceptions import JMSException
 from common.utils import random_string, get_logger, get_request_ip

@@ -22,12 +23,12 @@ from common.utils.http import is_true, is_false
 from orgs.mixins.api import RootOrgViewMixin
 from perms.models import ActionChoices
 from terminal.connect_methods import NativeClient, ConnectMethodUtil
-from terminal.models import EndpointRule
+from terminal.models import EndpointRule, Endpoint
 from ..models import ConnectionToken, date_expired_default
 from ..serializers import (
     ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
     SuperConnectionTokenSerializer, ConnectTokenAppletOptionSerializer,
-    ConnectionTokenUpdateSerializer
+    ConnectionTokenReusableSerializer,
 )

 __all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']

@@ -165,11 +166,13 @@ class RDPFileClientProtocolURLMixin:
         return data

     def get_smart_endpoint(self, protocol, asset=None):
-        target_ip = asset.get_target_ip() if asset else ''
-        endpoint = EndpointRule.match_endpoint(
-            target_instance=asset, target_ip=target_ip,
-            protocol=protocol, request=self.request
-        )
+        endpoint = Endpoint.match_by_instance_label(asset, protocol)
+        if not endpoint:
+            target_ip = asset.get_target_ip() if asset else ''
+            endpoint = EndpointRule.match_endpoint(
+                target_instance=asset, target_ip=target_ip,
+                protocol=protocol, request=self.request
+            )
         return endpoint


@@ -211,6 +214,18 @@ class ExtraActionApiMixin(RDPFileClientProtocolURLMixin):
         instance.expire()
         return Response(status=status.HTTP_204_NO_CONTENT)

+    @action(methods=['PATCH'], detail=True, url_path='reuse')
+    def reuse(self, request, *args, **kwargs):
+        instance = self.get_object()
+        if not settings.CONNECTION_TOKEN_REUSABLE:
+            error = _('Reusable connection token is not allowed, global setting not enabled')
+            raise serializers.ValidationError(error)
+        serializer = self.get_serializer(instance, data=request.data, partial=True)
+        serializer.is_valid(raise_exception=True)
+        is_reusable = serializer.validated_data.get('is_reusable', False)
+        instance.set_reusable(is_reusable)
+        return Response(data=serializer.data)
+
     @action(methods=['POST'], detail=False)
     def exchange(self, request, *args, **kwargs):
         pk = request.data.get('id', None) or request.data.get('pk', None)

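The new `reuse` action only accepts PATCH and refuses to toggle a token unless CONNECTION_TOKEN_REUSABLE is enabled globally. A hypothetical client call is sketched below; the route prefix and auth header are assumptions, only the url_path='reuse' and the PATCH verb come from the hunk above.

    # Hypothetical client call against the new `reuse` action; the URL prefix
    # and the auth header are placeholders, not confirmed by this diff.
    import requests

    token_id = "00000000-0000-0000-0000-000000000000"   # placeholder token id
    resp = requests.patch(
        f"https://jms.example.com/api/v1/authentication/connection-tokens/{token_id}/reuse/",
        headers={"Authorization": "Bearer <private-token>"},
        json={"is_reusable": True},
        timeout=10,
    )
    print(resp.status_code, resp.json())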
@@ -231,17 +246,16 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
     search_fields = filterset_fields
     serializer_classes = {
         'default': ConnectionTokenSerializer,
-        'update': ConnectionTokenUpdateSerializer,
-        'partial_update': ConnectionTokenUpdateSerializer,
+        'reuse': ConnectionTokenReusableSerializer,
     }
     http_method_names = ['get', 'post', 'patch', 'head', 'options', 'trace']
     rbac_perms = {
         'list': 'authentication.view_connectiontoken',
         'retrieve': 'authentication.view_connectiontoken',
-        'update': 'authentication.change_connectiontoken',
         'create': 'authentication.add_connectiontoken',
         'exchange': 'authentication.add_connectiontoken',
-        'expire': 'authentication.change_connectiontoken',
+        'reuse': 'authentication.reuse_connectiontoken',
+        'expire': 'authentication.expire_connectiontoken',
         'get_rdp_file': 'authentication.add_connectiontoken',
         'get_client_protocol_url': 'authentication.add_connectiontoken',
     }

@@ -282,13 +296,17 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
         data['org_id'] = asset.org_id
         data['user'] = user
         data['value'] = random_string(16)
+
+        if account_name == AliasAccount.ANON and asset.category not in ['web', 'custom']:
+            raise ValidationError(_('Anonymous account is not supported for this asset'))
+
         account = self._validate_perm(user, asset, account_name)
         if account.has_secret:
             data['input_secret'] = ''

-        if account.username != '@INPUT':
+        if account.username != AliasAccount.INPUT:
             data['input_username'] = ''
-        if account.username == '@USER':
+        elif account.username == AliasAccount.USER:
             data['input_username'] = user.username

         ticket = self._validate_acl(user, asset, account)

@@ -341,7 +359,7 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
     rbac_perms = {
         'create': 'authentication.add_superconnectiontoken',
         'renewal': 'authentication.add_superconnectiontoken',
-        'get_secret_detail': 'authentication.view_connectiontokensecret',
+        'get_secret_detail': 'authentication.view_superconnectiontokensecret',
         'get_applet_info': 'authentication.view_superconnectiontoken',
         'release_applet_account': 'authentication.view_superconnectiontoken',
     }

@@ -371,7 +389,7 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
     @action(methods=['POST'], detail=False, url_path='secret')
     def get_secret_detail(self, request, *args, **kwargs):
         """ 非常重要的 api, 在逻辑层再判断一下 rbac 权限, 双重保险 """
-        rbac_perm = 'authentication.view_connectiontokensecret'
+        rbac_perm = 'authentication.view_superconnectiontokensecret'
         if not request.user.has_perm(rbac_perm):
             raise PermissionDenied('Not allow to view secret')


@@ -1,3 +1,4 @@
+from django.http import HttpResponseRedirect
 from rest_framework.generics import CreateAPIView
 from rest_framework.response import Response
 from rest_framework.permissions import AllowAny

@@ -41,7 +42,7 @@ class UserResetPasswordSendCodeApi(CreateAPIView):
         token = request.GET.get('token')
         userinfo = cache.get(token)
         if not userinfo:
-            return reverse('authentication:forgot-previewing')
+            return HttpResponseRedirect(reverse('authentication:forgot-previewing'))

         serializer = self.get_serializer(data=request.data)
         serializer.is_valid(raise_exception=True)

@@ -9,6 +9,7 @@ from django_auth_ldap.config import _LDAPConfig, LDAPSearch, LDAPSearchUnion

 from users.utils import construct_user_email
 from common.const import LDAP_AD_ACCOUNT_DISABLE
+from common.utils.http import is_true
 from .base import JMSBaseAuthBackend

 logger = _LDAPConfig.get_logger()

@@ -162,10 +163,11 @@ class LDAPUser(_LDAPUser):
             try:
                 value = self.attrs[attr][0]
                 value = value.strip()
-                if attr.lower() == 'useraccountcontrol' \
-                        and field == 'is_active' and value:
-                    value = int(value) & LDAP_AD_ACCOUNT_DISABLE \
-                            != LDAP_AD_ACCOUNT_DISABLE
+                if field == 'is_active':
+                    if attr.lower() == 'useraccountcontrol' and value:
+                        value = int(value) & LDAP_AD_ACCOUNT_DISABLE != LDAP_AD_ACCOUNT_DISABLE
+                    else:
+                        value = is_true(value)
             except LookupError:
                 logger.warning("{} does not have a value for the attribute {}".format(self.dn, attr))
             else:

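The reshaped branch above keys the special handling on the target field (is_active) first, then either decodes the AD userAccountControl bit mask or falls back to the new is_true() helper for plain truthy strings. A standalone restatement of that decision, with is_true approximated rather than imported:

    # Rough restatement of the is_active mapping; LDAP_AD_ACCOUNT_DISABLE is
    # AD's ACCOUNTDISABLE bit (2) in userAccountControl, as in the diff above.
    LDAP_AD_ACCOUNT_DISABLE = 2

    def guess_is_active(attr_name, raw_value):
        if attr_name.lower() == 'useraccountcontrol' and raw_value:
            # The account is active when the disable bit is NOT set
            return int(raw_value) & LDAP_AD_ACCOUNT_DISABLE != LDAP_AD_ACCOUNT_DISABLE
        # Fallback: interpret plain truthy strings (approximation of is_true)
        return str(raw_value).strip().lower() in ('1', 'true', 'yes', 'on')

    print(guess_is_active('userAccountControl', '512'))   # True, normal account
    print(guess_is_active('userAccountControl', '514'))   # False, disabled bit set
    print(guess_is_active('employeeStatus', 'true'))      # True via the fallback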
@@ -0,0 +1,24 @@
+# Generated by Django 3.2.19 on 2023-07-13 06:59
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('authentication', '0020_connectiontoken_connect_options'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='connectiontoken',
+            options={'ordering': ('-date_expired',),
+                     'permissions': [('expire_connectiontoken', 'Can expire connection token'),
+                                     ('reuse_connectiontoken', 'Can reuse connection token')],
+                     'verbose_name': 'Connection token'},
+        ),
+        migrations.AlterModelOptions(
+            name='superconnectiontoken',
+            options={'permissions': [('view_superconnectiontokensecret', 'Can view super connection token secret')],
+                     'verbose_name': 'Super connection token'},
+        ),
+    ]

@@ -9,6 +9,7 @@ from django.utils import timezone
 from django.utils.translation import ugettext_lazy as _
 from rest_framework.exceptions import PermissionDenied

+from accounts.const import AliasAccount
 from assets.const import Protocol
 from assets.const.host import GATEWAY_NAME
 from common.db.fields import EncryptTextField

@@ -21,7 +22,7 @@ from terminal.models import Applet


 def date_expired_default():
-    return timezone.now() + timedelta(seconds=settings.CONNECTION_TOKEN_EXPIRATION)
+    return timezone.now() + timedelta(seconds=settings.CONNECTION_TOKEN_ONETIME_EXPIRATION)


 class ConnectionToken(JMSOrgBaseModel):

@@ -53,10 +54,11 @@ class ConnectionToken(JMSOrgBaseModel):

     class Meta:
         ordering = ('-date_expired',)
-        verbose_name = _('Connection token')
         permissions = [
-            ('view_connectiontokensecret', _('Can view connection token secret'))
+            ('expire_connectiontoken', _('Can expire connection token')),
+            ('reuse_connectiontoken', _('Can reuse connection token')),
         ]
+        verbose_name = _('Connection token')

     @property
     def is_expired(self):

@@ -79,6 +81,15 @@ class ConnectionToken(JMSOrgBaseModel):
         self.date_expired = timezone.now()
         self.save(update_fields=['date_expired'])

+    def set_reusable(self, is_reusable):
+        self.is_reusable = is_reusable
+        if self.is_reusable:
+            seconds = settings.CONNECTION_TOKEN_REUSABLE_EXPIRATION
+        else:
+            seconds = settings.CONNECTION_TOKEN_ONETIME_EXPIRATION
+        self.date_expired = timezone.now() + timedelta(seconds=seconds)
+        self.save(update_fields=['is_reusable', 'date_expired'])
+
     def renewal(self):
         """ 续期 Token,将来支持用户自定义创建 token 后,续期策略要修改 """
         self.date_expired = date_expired_default()

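set_reusable() stretches or shrinks date_expired depending on the flag. A small sketch of the resulting expiry windows, using the default values this same commit adds to the configuration (5 minutes for one-time tokens, 30 days for reusable ones):

    # Sketch of the expiry window chosen by set_reusable(); the two constants
    # mirror the defaults added in the conf.py hunk later in this diff.
    from datetime import datetime, timedelta, timezone

    CONNECTION_TOKEN_ONETIME_EXPIRATION = 5 * 60             # seconds
    CONNECTION_TOKEN_REUSABLE_EXPIRATION = 60 * 60 * 24 * 30

    def new_expiry(is_reusable, now=None):
        now = now or datetime.now(timezone.utc)
        seconds = (CONNECTION_TOKEN_REUSABLE_EXPIRATION if is_reusable
                   else CONNECTION_TOKEN_ONETIME_EXPIRATION)
        return now + timedelta(seconds=seconds)

    print(new_expiry(False))  # roughly 5 minutes from now
    print(new_expiry(True))   # roughly 30 days from now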
@@ -175,7 +186,7 @@ class ConnectionToken(JMSOrgBaseModel):
         if not applet:
             return None

-        host_account = applet.select_host_account(self.user)
+        host_account = applet.select_host_account(self.user, self.asset)
         if not host_account:
             raise JMSException({'error': 'No host account available'})


@@ -209,29 +220,19 @@ class ConnectionToken(JMSOrgBaseModel):
         if not self.asset:
             return None

-        account = self.asset.accounts.filter(name=self.account).first()
-        if self.account == '@INPUT' or not account:
-            data = {
-                'name': self.account,
-                'username': self.input_username,
-                'secret_type': 'password',
-                'secret': self.input_secret,
-                'su_from': None,
-                'org_id': self.asset.org_id,
-                'asset': self.asset
-            }
+        if self.account.startswith('@'):
+            account = Account.get_special_account(self.account)
+            account.asset = self.asset
+            account.org_id = self.asset.org_id
+
+            if self.account in [AliasAccount.INPUT, AliasAccount.USER]:
+                account.username = self.input_username
+                account.secret = self.input_secret
         else:
-            data = {
-                'name': account.name,
-                'username': account.username,
-                'secret_type': account.secret_type,
-                'secret': account.secret or self.input_secret,
-                'su_from': account.su_from,
-                'org_id': account.org_id,
-                'privileged': account.privileged,
-                'asset': self.asset
-            }
-        return Account(**data)
+            account = self.asset.accounts.filter(name=self.account).first()
+            if not account.secret and self.input_secret:
+                account.secret = self.input_secret
+        return account

     @lazyproperty
     def domain(self):

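The rewritten block above branches on the '@' alias prefix instead of assembling a raw data dict for Account(**data). The sketch below mirrors that branching with a stand-in dataclass; Account.get_special_account() is not shown in this diff, so it is faked here purely for illustration.

    # Illustration of the account-resolution branching; FakeAccount and the
    # named_accounts dict are stand-ins, not the project's Account model.
    from dataclasses import dataclass

    @dataclass
    class FakeAccount:
        name: str
        username: str = ''
        secret: str = ''

    def resolve_account(token_account, input_username, input_secret, named_accounts):
        if token_account.startswith('@'):
            account = FakeAccount(name=token_account)
            if token_account in ('@INPUT', '@USER'):
                account.username = input_username
                account.secret = input_secret
        else:
            account = named_accounts.get(token_account)
            if account and not account.secret and input_secret:
                account.secret = input_secret
        return account

    accounts = {'root': FakeAccount(name='root', username='root', secret='')}
    print(resolve_account('@INPUT', 'alice', 's3cret', accounts))
    print(resolve_account('root', '', 'fallback-secret', accounts))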
@@ -264,4 +265,7 @@ class ConnectionToken(JMSOrgBaseModel):
 class SuperConnectionToken(ConnectionToken):
     class Meta:
         proxy = True
+        permissions = [
+            ('view_superconnectiontokensecret', _('Can view super connection token secret'))
+        ]
         verbose_name = _("Super connection token")

|
||||||
from django.conf import settings
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.utils.translation import ugettext_lazy as _
|
from django.utils.translation import ugettext_lazy as _
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from common.serializers import CommonModelSerializer
|
||||||
from common.serializers.fields import EncryptedField
|
from common.serializers.fields import EncryptedField
|
||||||
from orgs.mixins.serializers import OrgResourceModelSerializerMixin
|
|
||||||
from perms.serializers.permission import ActionChoicesField
|
from perms.serializers.permission import ActionChoicesField
|
||||||
from ..models import ConnectionToken
|
from ..models import ConnectionToken
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
'ConnectionTokenSerializer', 'SuperConnectionTokenSerializer',
|
'ConnectionTokenSerializer', 'SuperConnectionTokenSerializer',
|
||||||
'ConnectionTokenUpdateSerializer',
|
'ConnectionTokenReusableSerializer',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
|
class ConnectionTokenSerializer(CommonModelSerializer):
|
||||||
expire_time = serializers.IntegerField(read_only=True, label=_('Expired time'))
|
expire_time = serializers.IntegerField(read_only=True, label=_('Expired time'))
|
||||||
input_secret = EncryptedField(
|
input_secret = EncryptedField(
|
||||||
label=_("Input secret"), max_length=40960, required=False, allow_blank=True
|
label=_("Input secret"), max_length=40960, required=False, allow_blank=True
|
||||||
|
@@ -60,30 +58,12 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
         return info


-class ConnectionTokenUpdateSerializer(ConnectionTokenSerializer):
-    class Meta(ConnectionTokenSerializer.Meta):
-        can_update_fields = ['is_reusable']
-        read_only_fields = list(set(ConnectionTokenSerializer.Meta.fields) - set(can_update_fields))
-
-    def _get_date_expired(self):
-        delta = self.instance.date_expired - self.instance.date_created
-        if delta.total_seconds() > 3600 * 24:
-            return self.instance.date_expired
-
-        seconds = settings.CONNECTION_TOKEN_EXPIRATION_MAX
-        return timezone.now() + timezone.timedelta(seconds=seconds)
-
-    @staticmethod
-    def validate_is_reusable(value):
-        if value and not settings.CONNECTION_TOKEN_REUSABLE:
-            raise serializers.ValidationError(_('Reusable connection token is not allowed, global setting not enabled'))
-        return value
-
-    def validate(self, attrs):
-        reusable = attrs.get('is_reusable', False)
-        if reusable:
-            attrs['date_expired'] = self._get_date_expired()
-        return attrs
+class ConnectionTokenReusableSerializer(CommonModelSerializer):
+    class Meta:
+        model = ConnectionToken
+        fields = ['id', 'date_expired', 'is_reusable']
+        can_update_fields = ['is_reusable']
+        read_only_fields = list(set(fields) - set(can_update_fields))


 class SuperConnectionTokenSerializer(ConnectionTokenSerializer):

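ConnectionTokenReusableSerializer marks everything except is_reusable as read-only via simple set arithmetic on the declared fields. A quick check of that expression:

    # The Meta above derives read_only_fields from fields; only is_reusable
    # remains writable through the reuse action.
    fields = ['id', 'date_expired', 'is_reusable']
    can_update_fields = ['is_reusable']
    read_only_fields = list(set(fields) - set(can_update_fields))
    print(sorted(read_only_fields))   # ['date_expired', 'id']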
@@ -2,15 +2,19 @@
 #

 from __future__ import unicode_literals
-import os
 import datetime
+import os
 from typing import Callable

-from django.db import IntegrityError
-from django.templatetags.static import static
+from django.conf import settings
+from django.contrib.auth import BACKEND_SESSION_KEY
 from django.contrib.auth import login as auth_login, logout as auth_logout
-from django.http import HttpResponse, HttpRequest
+from django.db import IntegrityError
+from django.http import HttpRequest
 from django.shortcuts import reverse, redirect
+from django.templatetags.static import static
+from django.urls import reverse_lazy
 from django.utils.decorators import method_decorator
 from django.utils.translation import ugettext as _, get_language
 from django.views.decorators.cache import never_cache

@@ -18,16 +22,13 @@ from django.views.decorators.csrf import csrf_protect
 from django.views.decorators.debug import sensitive_post_parameters
 from django.views.generic.base import TemplateView, RedirectView
 from django.views.generic.edit import FormView
-from django.conf import settings
-from django.urls import reverse_lazy
-from django.contrib.auth import BACKEND_SESSION_KEY

 from common.utils import FlashMessageUtil, static_or_direct
 from users.utils import (
     redirect_user_first_login_or_index
 )
-from ..const import RSA_PRIVATE_KEY, RSA_PUBLIC_KEY
 from .. import mixins, errors
+from ..const import RSA_PRIVATE_KEY, RSA_PUBLIC_KEY
 from ..forms import get_user_login_form_cls

 __all__ = [

@@ -203,7 +204,9 @@ class UserLoginView(mixins.AuthMixin, UserLoginContextMixin, FormView):

     def form_valid(self, form):
         if not self.request.session.test_cookie_worked():
-            return HttpResponse(_("Please enable cookies and try again."))
+            form.add_error(None, _("Login timeout, please try again."))
+            return self.form_invalid(form)
+
         # https://docs.djangoproject.com/en/3.1/topics/http/sessions/#setting-test-cookies
         self.request.session.delete_test_cookie()


@@ -10,6 +10,8 @@ from common.drf.filters import IDSpmFilter, CustomFilter, IDInFilter

 __all__ = ['ExtraFilterFieldsMixin', 'OrderingFielderFieldsMixin']

+logger = logging.getLogger('jumpserver.common')
+

 class ExtraFilterFieldsMixin:
     """

@@ -54,7 +56,9 @@ class OrderingFielderFieldsMixin:
         try:
             valid_fields = self.get_valid_ordering_fields()
         except Exception as e:
-            logging.debug('get_valid_ordering_fields error: %s' % e)
+            logger.debug('get_valid_ordering_fields error: %s' % e)
+            # 这里千万不要这么用,会让 logging 重复,至于为什么,我也不知道
+            # logging.debug('get_valid_ordering_fields error: %s' % e)
             valid_fields = []

         fields = list(chain(

@@ -40,7 +40,7 @@ class SignatureAuthentication(authentication.BaseAuthentication):
     required_headers = ["(request-target)", "date"]

     def fetch_user_data(self, key_id, algorithm=None):
-        """Retuns a tuple (User, secret) or (None, None)."""
+        """Returns a tuple (User, secret) or (None, None)."""
         raise NotImplementedError()

     def authenticate_header(self, request):

@@ -328,13 +328,13 @@ class RelatedManager:
         q = Q()
         if isinstance(val, str):
             val = [val]
+        if ['*'] in val:
+            return Q()
         for ip in val:
             if not ip:
                 continue
             try:
-                if ip == '*':
-                    return Q()
-                elif '/' in ip:
+                if '/' in ip:
                     network = ipaddress.ip_network(ip)
                     ips = network.hosts()
                     q |= Q(**{"{}__in".format(name): ips})

@@ -378,7 +378,7 @@ class RelatedManager:

         if match == 'ip_in':
             q = cls.get_ip_in_q(name, val)
-        elif match in ("exact", "contains", "startswith", "endswith", "gte", "lte", "gt", "lt"):
+        elif match in ("contains", "startswith", "endswith", "gte", "lte", "gt", "lt"):
             lookup = "{}__{}".format(name, match)
             q = Q(**{lookup: val})
         elif match == 'regex':

@@ -387,7 +387,7 @@ class RelatedManager:
             lookup = "{}__{}".format(name, match)
             q = Q(**{lookup: val})
         except re.error:
-            q = ~Q()
+            q = Q(pk__isnull=True)
         elif match == "not":
             q = ~Q(**{name: val})
         elif match in ['m2m', 'in']:

@@ -459,7 +459,7 @@ class JSONManyToManyDescriptor:

         custom_q = Q()
         for rule in attr_rules:
-            value = getattr(obj, rule['name'], '')
+            value = getattr(obj, rule['name'], None) or ''
             rule_value = rule.get('value', '')
             rule_match = rule.get('match', 'exact')


@@ -470,11 +470,11 @@ class JSONManyToManyDescriptor:
                 continue

             if rule_match == 'in':
-                res &= value in rule_value
+                res &= value in rule_value or '*' in rule_value
             elif rule_match == 'exact':
-                res &= value == rule_value
+                res &= value == rule_value or rule_value == '*'
             elif rule_match == 'contains':
-                res &= rule_value in value
+                res &= (rule_value in value)
             elif rule_match == 'startswith':
                 res &= str(value).startswith(str(rule_value))
             elif rule_match == 'endswith':

@@ -499,7 +499,7 @@ class JSONManyToManyDescriptor:
             elif rule['match'] == 'ip_in':
                 if isinstance(rule_value, str):
                     rule_value = [rule_value]
-                res &= contains_ip(value, rule_value)
+                res &= '*' in rule_value or contains_ip(value, rule_value)
             elif rule['match'] == 'm2m':
                 if isinstance(value, Manager):
                     value = value.values_list('id', flat=True)

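Several hunks above add '*' wildcard handling to the attribute-rule matching: an 'in' rule containing '*', an 'exact' rule equal to '*', or an 'ip_in' rule containing '*' now match any value. A pure-Python restatement of the first three cases:

    # Restatement of the wildcard behaviour introduced above; only the rule
    # semantics are reproduced, not the Django Q-object construction.
    def rule_matches(match, value, rule_value):
        if match == 'in':
            return value in rule_value or '*' in rule_value
        if match == 'exact':
            return value == rule_value or rule_value == '*'
        if match == 'contains':
            return rule_value in value
        raise ValueError('unsupported match: %s' % match)

    print(rule_matches('exact', 'web01', '*'))           # True
    print(rule_matches('in', 'db02', ['*']))             # True
    print(rule_matches('contains', 'db02-prod', 'db'))   # True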
@@ -6,6 +6,7 @@ import inspect
 import threading
 import time
 from concurrent.futures import ThreadPoolExecutor
+from functools import wraps

 from django.db import transaction

@@ -217,3 +218,24 @@ def do_test():
     end = time.time()
     using = end - s
     print("end : %s, using: %s" % (end, using))
+
+
+def cached_method(ttl=20):
+    _cache = {}
+
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            key = (func, args, tuple(sorted(kwargs.items())))
+            # 检查缓存是否存在且未过期
+            if key in _cache and time.time() - _cache[key]['timestamp'] < ttl:
+                return _cache[key]['result']
+
+            # 缓存过期或不存在,执行方法并缓存结果
+            result = func(*args, **kwargs)
+            _cache[key] = {'result': result, 'timestamp': time.time()}
+            return result
+
+        return wrapper
+
+    return decorator

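cached_method() is a TTL memoizer keyed on the function object plus its arguments. A usage example follows; the decorator body is copied inline so the snippet runs on its own (in the codebase it would be imported from this utils module). Note that for bound methods `self` is part of the key, so each instance gets its own cache slot.

    # Example use of cached_method(); the decorator is inlined here only to
    # keep the demo self-contained.
    import time
    from functools import wraps

    def cached_method(ttl=20):
        _cache = {}

        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                key = (func, args, tuple(sorted(kwargs.items())))
                if key in _cache and time.time() - _cache[key]['timestamp'] < ttl:
                    return _cache[key]['result']
                result = func(*args, **kwargs)
                _cache[key] = {'result': result, 'timestamp': time.time()}
                return result
            return wrapper
        return decorator

    @cached_method(ttl=2)
    def slow_lookup(name):
        time.sleep(1)
        return 'value-for-%s' % name

    start = time.time()
    slow_lookup('a')   # first call pays the 1 second cost
    slow_lookup('a')   # second call is served from the cache
    print('took %.2fs' % (time.time() - start))   # roughly 1s, not 2s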
@@ -14,6 +14,8 @@ from rest_framework.serializers import ValidationError

 from common import const

+logger = logging.getLogger('jumpserver.common')
+
 __all__ = [
     "DatetimeRangeFilter", "IDSpmFilter",
     'IDInFilter', "CustomFilter",

@@ -70,7 +72,7 @@ class DatetimeRangeFilter(filters.BaseFilterBackend):
             ]
             ```
             """.format(view.name)
-            logging.error(msg)
+            logger.error(msg)
             raise ImproperlyConfigured(msg)

     def filter_queryset(self, request, queryset, view):

@@ -213,6 +215,6 @@ class AttrRulesFilterBackend(filters.BaseFilterBackend):
         except Exception:
             raise ValidationError({'attr_rules': 'attr_rules should be json'})

-        logging.debug('attr_rules: %s', attr_rules)
+        logger.debug('attr_rules: %s', attr_rules)
         q = RelatedManager.get_to_filter_q(attr_rules, queryset.model)
         return queryset.filter(q).distinct()

@@ -52,14 +52,16 @@ class BaseFileParser(BaseParser):
         fields_map = {}
         fields = self.serializer_fields
         for k, v in fields.items():
-            if v.read_only:
+            # 资产平台的 id 是只读的, 导入更新资产平台会失败
+            if v.read_only and k not in ['id', 'pk']:
                 continue
             fields_map.update({
                 v.label: k,
                 k: k
             })
+        lowercase_fields_map = {k.lower(): v for k, v in fields_map.items()}
         field_names = [
-            fields_map.get(column_title.strip('*'), '')
+            lowercase_fields_map.get(column_title.strip('*').lower(), '')
            for column_title in column_titles
         ]
         return field_names

|
||||||
import multiprocessing
|
import multiprocessing
|
||||||
from django.core.management.base import BaseCommand, CommandError
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
from django.db.models import TextChoices
|
from django.db.models import TextChoices
|
||||||
from .utils import ServicesUtil
|
|
||||||
from .hands import *
|
from .hands import *
|
||||||
|
from .utils import ServicesUtil
|
||||||
|
|
||||||
|
|
||||||
class Services(TextChoices):
|
class Services(TextChoices):
|
||||||
|
@ -92,15 +94,11 @@ class BaseActionCommand(BaseCommand):
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
cores = 10
|
|
||||||
if (multiprocessing.cpu_count() * 2 + 1) < cores:
|
|
||||||
cores = multiprocessing.cpu_count() * 2 + 1
|
|
||||||
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'services', nargs='+', choices=Services.export_services_values(), help='Service',
|
'services', nargs='+', choices=Services.export_services_values(), help='Service',
|
||||||
)
|
)
|
||||||
parser.add_argument('-d', '--daemon', nargs="?", const=True)
|
parser.add_argument('-d', '--daemon', nargs="?", const=True)
|
||||||
parser.add_argument('-w', '--worker', type=int, nargs="?", default=cores)
|
parser.add_argument('-w', '--worker', type=int, nargs="?", default=4)
|
||||||
parser.add_argument('-f', '--force', nargs="?", const=True)
|
parser.add_argument('-f', '--force', nargs="?", const=True)
|
||||||
|
|
||||||
def initial_util(self, *args, **options):
|
def initial_util(self, *args, **options):
|
||||||
|
|
|
@@ -1,12 +1,14 @@
+import logging
 import os
 import sys
-import logging
+
 from django.conf import settings

 from apps.jumpserver.const import CONFIG

 try:
     from apps.jumpserver import const
+
     __version__ = const.VERSION
 except ImportError as e:
     print("Not found __version__: {}".format(e))

@@ -15,12 +17,11 @@ except ImportError as e:
     __version__ = 'Unknown'
     sys.exit(1)

-
 HTTP_HOST = CONFIG.HTTP_BIND_HOST or '127.0.0.1'
 HTTP_PORT = CONFIG.HTTP_LISTEN_PORT or 8080
 WS_PORT = CONFIG.WS_LISTEN_PORT or 8082
 DEBUG = CONFIG.DEBUG or False
 BASE_DIR = os.path.dirname(settings.BASE_DIR)
-LOG_DIR = os.path.join(BASE_DIR, 'logs')
+LOG_DIR = os.path.join(BASE_DIR, 'data', 'logs')
 APPS_DIR = os.path.join(BASE_DIR, 'apps')
 TMP_DIR = os.path.join(BASE_DIR, 'tmp')

@@ -1,5 +1,5 @@
-from ..hands import *
 from .base import BaseService
+from ..hands import *

 __all__ = ['GunicornService']


@@ -22,7 +22,8 @@ class GunicornService(BaseService):
             '-b', bind,
             '-k', 'uvicorn.workers.UvicornWorker',
             '-w', str(self.worker),
-            '--max-requests', '4096',
+            '--max-requests', '10240',
+            '--max-requests-jitter', '2048',
             '--access-logformat', log_format,
             '--access-logfile', '-'
         ]

@@ -44,19 +44,24 @@ def set_default_by_type(tp, data, field_info):

 def create_serializer_class(serializer_name, fields_info):
     serializer_fields = {}
-    fields_name = ['name', 'label', 'default', 'type', 'help_text']
+    fields_name = ['name', 'label', 'default', 'required', 'type', 'help_text']

     for i, field_info in enumerate(fields_info):
         data = {k: field_info.get(k) for k in fields_name}
         field_type = data.pop('type', 'str')

-        if data.get('default') is None:
+        # 用户定义 default 和 required 可能会冲突, 所以要处理一下
+        default = data.get('default', None)
+        if default is None:
             data.pop('default', None)
-            data['required'] = field_info.get('required', True)
+            data['required'] = True
+        elif default == '':
+            data['required'] = False
+            data['allow_blank'] = True
+        else:
+            data['required'] = False
         data = set_default_by_type(field_type, data, field_info)
         data = set_default_if_need(data, i)
-        if data.get('default', None) is not None:
-            data['required'] = False
         field_name = data.pop('name')
         field_class = type_field_map.get(field_type, serializers.CharField)
         serializer_fields[field_name] = field_class(**data)

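The reworked block above resolves the default/required conflict per field: no default means the field is required, an empty-string default makes it optional with blanks allowed, and any other default simply makes it optional. A standalone restatement of that rule on a plain dict (the field specs are invented for illustration):

    # Restatement of the default/required rule as plain keyword-argument
    # computation; the specs below are made up.
    def field_kwargs(spec):
        data = {'label': spec.get('label')}
        default = spec.get('default', None)
        if default is None:
            data['required'] = True
        elif default == '':
            data.update(required=False, allow_blank=True)
        else:
            data.update(required=False, default=default)
        return data

    print(field_kwargs({'label': 'Username'}))                # required, no default
    print(field_kwargs({'label': 'Comment', 'default': ''}))  # optional, blank allowed
    print(field_kwargs({'label': 'Port', 'default': '22'}))   # optional with a default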
@@ -212,6 +212,23 @@ class BitChoicesField(TreeChoicesField):


 class PhoneField(serializers.CharField):
+
+    def to_internal_value(self, data):
+        if isinstance(data, dict):
+            code = data.get('code')
+            phone = data.get('phone', '')
+            if code and phone:
+                data = '{}{}'.format(code, phone)
+            else:
+                data = phone
+        try:
+            phone = phonenumbers.parse(data, 'CN')
+            data = '{}{}'.format(phone.country_code, phone.national_number)
+        except phonenumbers.NumberParseException:
+            data = '+86{}'.format(data)
+
+        return super().to_internal_value(data)
+
     def to_representation(self, value):
         if value:
             try:

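to_internal_value() now accepts either a {'code', 'phone'} dict or a bare string and normalizes it through the phonenumbers package, falling back to a '+86' prefix when parsing fails. A direct illustration of that parsing path (requires the phonenumbers package to be installed):

    # Illustration of the phonenumbers normalization used above.
    import phonenumbers

    for raw in ('+8613812345678', '13812345678'):
        try:
            parsed = phonenumbers.parse(raw, 'CN')
            print('{}{}'.format(parsed.country_code, parsed.national_number))
        except phonenumbers.NumberParseException:
            print('+86{}'.format(raw))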
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 #
-import logging
 import os
 import re
 from collections import defaultdict

@@ -14,9 +13,10 @@ from django.dispatch import receiver
 from jumpserver.utils import get_current_request
 from .local import thread_local
 from .signals import django_ready
+from .utils import get_logger

 pattern = re.compile(r'FROM `(\w+)`')
-logger = logging.getLogger("jumpserver.common")
+logger = get_logger(__name__)


 class Counter:

@@ -129,7 +129,6 @@ else:

 @receiver(django_ready)
 def check_migrations_file_prefix_conflict(*args, **kwargs):
-
     if not settings.DEBUG_DEV:
         return


@@ -172,7 +171,7 @@ def check_migrations_file_prefix_conflict(*args, **kwargs):
     if not conflict_count:
         return

-    print('='*80)
+    print('=' * 80)
     for conflict_file in conflict_files:
         msg_dir = '{:<15}'.format(conflict_file[0])
         msg_split = '=> '

@@ -181,4 +180,4 @@ def check_migrations_file_prefix_conflict(*args, **kwargs):
         msg_right2 = ' ' * len(msg_left) + msg_split + conflict_file[2]
         print(f'{msg_left}{msg_right1}\n{msg_right2}\n')

-    print('='*80)
+    print('=' * 80)

@@ -49,6 +49,7 @@ def send_mail_attachment_async(subject, message, recipient_list, attachment_list
     if attachment_list is None:
         attachment_list = []
     from_email = settings.EMAIL_FROM or settings.EMAIL_HOST_USER
+    subject = (settings.EMAIL_SUBJECT_PREFIX or '') + subject
     email = EmailMultiAlternatives(
         subject=subject,
         body=message,

@@ -1,15 +1,15 @@
-from django.core.cache import cache
-from django.conf import settings
-from django.core.mail import send_mail
 from celery import shared_task
-from common.sdk.sms.exceptions import CodeError, CodeExpired, CodeSendTooFrequently
-from common.sdk.sms.endpoint import SMS
-from common.exceptions import JMSException
-from common.utils.random import random_string
-from common.utils import get_logger
+from django.conf import settings
+from django.core.cache import cache
 from django.utils.translation import gettext_lazy as _

+from common.exceptions import JMSException
+from common.sdk.sms.endpoint import SMS
+from common.sdk.sms.exceptions import CodeError, CodeExpired, CodeSendTooFrequently
+from common.tasks import send_mail_async
+from common.utils import get_logger
+from common.utils.random import random_string

 logger = get_logger(__file__)


@@ -78,8 +78,7 @@ class SendAndVerifyCodeUtil(object):
     def __send_with_email(self):
         subject = self.other_args.get('subject')
         message = self.other_args.get('message')
-        from_email = settings.EMAIL_FROM or settings.EMAIL_HOST_USER
-        send_mail(subject, message, from_email, [self.target], html_message=message)
+        send_mail_async(subject, message, [self.target], html_message=message)

     def __send(self, code):
         """

|
||||||
from common.utils import lazyproperty
|
from common.utils import lazyproperty
|
||||||
from common.utils.timezone import local_now, local_zero_hour
|
from common.utils.timezone import local_now, local_zero_hour
|
||||||
from ops.const import JobStatus
|
from ops.const import JobStatus
|
||||||
from ops.models import JobExecution
|
|
||||||
from orgs.caches import OrgResourceStatisticsCache
|
from orgs.caches import OrgResourceStatisticsCache
|
||||||
from orgs.utils import current_org
|
from orgs.utils import current_org
|
||||||
|
from terminal.const import RiskLevelChoices
|
||||||
from terminal.models import Session, Command
|
from terminal.models import Session, Command
|
||||||
from terminal.utils import ComponentsPrometheusMetricsUtil
|
from terminal.utils import ComponentsPrometheusMetricsUtil
|
||||||
from users.models import User
|
from users.models import User
|
||||||
|
@@ -50,6 +50,10 @@ class DateTimeMixin:
         t = local_now() - timezone.timedelta(days=days)
         return t

+    @lazyproperty
+    def date_start_end(self):
+        return self.days_to_datetime.date(), local_now().date()
+
     @lazyproperty
     def dates_list(self):
         now = local_now()

@@ -126,12 +130,6 @@ class DateTimeMixin:
         queryset = JobLog.objects.filter(date_created__gte=t)
         return queryset

-    @lazyproperty
-    def jobs_executed_queryset(self):
-        t = self.days_to_datetime
-        queryset = JobExecution.objects.filter(date_created__gte=t)
-        return queryset
-

 class DatesLoginMetricMixin:
     dates_list: list

@@ -143,101 +141,40 @@ class DatesLoginMetricMixin:
     operate_logs_queryset: OperateLog.objects
     password_change_logs_queryset: PasswordChangeLog.objects

-    @staticmethod
-    def get_cache_key(date, tp):
-        date_str = date.strftime("%Y%m%d")
-        key = "SESSION_DATE_{}_{}_{}".format(current_org.id, tp, date_str)
-        return key
-
-    def __get_data_from_cache(self, date, tp):
-        if date == timezone.now().date():
-            return None
-        cache_key = self.get_cache_key(date, tp)
-        count = cache.get(cache_key)
-        return count
-
-    def __set_data_to_cache(self, date, tp, count):
-        cache_key = self.get_cache_key(date, tp)
-        cache.set(cache_key, count, 3600)
-
-    @staticmethod
-    def get_date_start_2_end(d):
-        time_min = timezone.datetime.min.time()
-        time_max = timezone.datetime.max.time()
-        tz = timezone.get_current_timezone()
-        ds = timezone.datetime.combine(d, time_min).replace(tzinfo=tz)
-        de = timezone.datetime.combine(d, time_max).replace(tzinfo=tz)
-        return ds, de
-
-    def get_date_login_count(self, date):
-        tp = "LOGIN-USER"
-        count = self.__get_data_from_cache(date, tp)
-        if count is not None:
-            return count
-        ds, de = self.get_date_start_2_end(date)
-        count = UserLoginLog.objects.filter(datetime__range=(ds, de)).count()
-        self.__set_data_to_cache(date, tp, count)
-        return count
-
     def get_dates_metrics_total_count_login(self):
-        data = []
-        for d in self.dates_list:
-            count = self.get_date_login_count(d)
-            data.append(count)
-        if len(data) == 0:
-            data = [0]
-        return data
-
-    def get_date_user_count(self, date):
-        tp = "USER"
-        count = self.__get_data_from_cache(date, tp)
-        if count is not None:
-            return count
-        ds, de = self.get_date_start_2_end(date)
-        count = len(set(Session.objects.filter(date_start__range=(ds, de)).values_list('user_id', flat=True)))
-        self.__set_data_to_cache(date, tp, count)
-        return count
+        queryset = UserLoginLog.objects \
+            .filter(datetime__range=(self.date_start_end)) \
+            .values('datetime__date').annotate(id__count=Count(id)) \
+            .order_by('datetime__date')
+        map_date_logincount = {i['datetime__date']: i['id__count'] for i in queryset}
+        return [map_date_logincount.get(d, 0) for d in self.dates_list]

     def get_dates_metrics_total_count_active_users(self):
-        data = []
-        for d in self.dates_list:
-            count = self.get_date_user_count(d)
-            data.append(count)
-        return data
-
-    def get_date_asset_count(self, date):
-        tp = "ASSET"
-        count = self.__get_data_from_cache(date, tp)
-        if count is not None:
-            return count
-        ds, de = self.get_date_start_2_end(date)
-        count = len(set(Session.objects.filter(date_start__range=(ds, de)).values_list('asset', flat=True)))
-        self.__set_data_to_cache(date, tp, count)
-        return count
+        queryset = Session.objects \
+            .filter(date_start__range=(self.date_start_end)) \
+            .values('date_start__date') \
+            .annotate(id__count=Count('user_id', distinct=True)) \
+            .order_by('date_start__date')
+        map_date_usercount = {i['date_start__date']: i['id__count'] for i in queryset}
+        return [map_date_usercount.get(d, 0) for d in self.dates_list]

     def get_dates_metrics_total_count_active_assets(self):
-        data = []
-        for d in self.dates_list:
-            count = self.get_date_asset_count(d)
-            data.append(count)
-        return data
-
-    def get_date_session_count(self, date):
-        tp = "SESSION"
-        count = self.__get_data_from_cache(date, tp)
-        if count is not None:
-            return count
-        ds, de = self.get_date_start_2_end(date)
-        count = Session.objects.filter(date_start__range=(ds, de)).count()
-        self.__set_data_to_cache(date, tp, count)
-        return count
+        queryset = Session.objects \
+            .filter(date_start__range=(self.date_start_end)) \
+            .values('date_start__date') \
+            .annotate(id__count=Count('asset_id', distinct=True)) \
+            .order_by('date_start__date')
+        map_date_assetcount = {i['date_start__date']: i['id__count'] for i in queryset}
+        return [map_date_assetcount.get(d, 0) for d in self.dates_list]

     def get_dates_metrics_total_count_sessions(self):
-        data = []
-        for d in self.dates_list:
-            count = self.get_date_session_count(d)
-            data.append(count)
-        return data
+        queryset = Session.objects \
+            .filter(date_start__range=(self.date_start_end)) \
+            .values('date_start__date') \
+            .annotate(id__count=Count(id)) \
+            .order_by('date_start__date')
+        map_date_usercount = {i['date_start__date']: i['id__count'] for i in queryset}
+        return [map_date_usercount.get(d, 0) for d in self.dates_list]

     @lazyproperty
     def get_type_to_assets(self):

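The rewritten metrics replace a per-day query-and-cache loop with one values()/annotate() aggregation per metric, then zero-fill missing days in Python. A generic sketch of that pattern follows (the model and field names are placeholders, not the project's):

    # Generic Django ORM sketch of the group-by-date aggregation used above;
    # `queryset` stands for any model queryset with a `date_start` datetime field.
    from django.db.models import Count

    def sessions_per_day(queryset, dates_list):
        rows = (
            queryset
            .values('date_start__date')
            .annotate(total=Count('id'))
            .order_by('date_start__date')
        )
        by_date = {r['date_start__date']: r['total'] for r in rows}
        # One list entry per requested date, zero-filled for days with no rows
        return [by_date.get(d, 0) for d in dates_list]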
@@ -312,7 +249,7 @@ class DatesLoginMetricMixin:

     @lazyproperty
     def commands_danger_amount(self):
-        return self.command_queryset.filter(risk_level=Command.RiskLevelChoices.dangerous).count()
+        return self.command_queryset.filter(risk_level=RiskLevelChoices.reject).count()

     @lazyproperty
     def job_logs_running_amount(self):

@@ -186,8 +186,9 @@ class Config(dict):
         'BOOTSTRAP_TOKEN': '',
         'DEBUG': False,
         'DEBUG_DEV': False,
+        'DEBUG_ANSIBLE': False,
         'LOG_LEVEL': 'DEBUG',
-        'LOG_DIR': os.path.join(PROJECT_DIR, 'logs'),
+        'LOG_DIR': os.path.join(PROJECT_DIR, 'data', 'logs'),
         'DB_ENGINE': 'mysql',
         'DB_NAME': 'jumpserver',
         'DB_HOST': '127.0.0.1',

|
@ -231,8 +232,12 @@ class Config(dict):
|
||||||
'SESSION_COOKIE_AGE': 3600 * 24,
|
'SESSION_COOKIE_AGE': 3600 * 24,
|
||||||
'SESSION_EXPIRE_AT_BROWSER_CLOSE': False,
|
'SESSION_EXPIRE_AT_BROWSER_CLOSE': False,
|
||||||
'LOGIN_URL': reverse_lazy('authentication:login'),
|
'LOGIN_URL': reverse_lazy('authentication:login'),
|
||||||
'CONNECTION_TOKEN_EXPIRATION': 5 * 60, # 默认
|
|
||||||
'CONNECTION_TOKEN_EXPIRATION_MAX': 60 * 60 * 24 * 30, # 最大
|
'CONNECTION_TOKEN_ONETIME_EXPIRATION': 5 * 60, # 默认(new)
|
||||||
|
'CONNECTION_TOKEN_EXPIRATION': 5 * 60, # 默认(old)
|
||||||
|
|
||||||
|
'CONNECTION_TOKEN_REUSABLE_EXPIRATION': 60 * 60 * 24 * 30, # 最大(new)
|
||||||
|
'CONNECTION_TOKEN_EXPIRATION_MAX': 60 * 60 * 24 * 30, # 最大(old)
|
||||||
'CONNECTION_TOKEN_REUSABLE': False,
|
'CONNECTION_TOKEN_REUSABLE': False,
|
||||||
|
|
||||||
# Custom Config
|
# Custom Config
|
||||||
|
@ -558,6 +563,11 @@ class Config(dict):
|
||||||
'FTP_FILE_MAX_STORE': 100,
|
'FTP_FILE_MAX_STORE': 100,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
old_config_map = {
|
||||||
|
'CONNECTION_TOKEN_ONETIME_EXPIRATION': 'CONNECTION_TOKEN_EXPIRATION',
|
||||||
|
'CONNECTION_TOKEN_REUSABLE_EXPIRATION': 'CONNECTION_TOKEN_EXPIRATION_MAX',
|
||||||
|
}
|
||||||
|
|
||||||
def __init__(self, *args):
|
def __init__(self, *args):
|
||||||
super().__init__(*args)
|
super().__init__(*args)
|
||||||
self.secret_encryptor = ConfigCrypto.get_secret_encryptor()
|
self.secret_encryptor = ConfigCrypto.get_secret_encryptor()
|
||||||
@@ -698,13 +708,19 @@ class Config(dict):
         value = self.convert_type(item, value)
         return value
 
-    def get(self, item):
+    def get(self, item, default=None):
         # Then get it from the config file
         value = self.get_from_config(item)
         if value is None:
             value = self.get_from_env(item)
+
+        # Because this method recurses, prefer the default carried over from the previous call
+        if default is None:
+            default = self.defaults.get(item)
+        if value is None and item in self.old_config_map:
+            return self.get(self.old_config_map[item], default)
         if value is None:
-            value = self.defaults.get(item)
+            value = default
         if self.secret_encryptor:
             value = self.secret_encryptor.decrypt_if_need(value, item)
         return value

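A reduced sketch of how the old_config_map fallback in Config.get() behaves, shrunk to a plain dict stand-in so it runs outside the project; MiniConfig and its sample values are hypothetical, only the two setting names come from the diff.

class MiniConfig(dict):
    defaults = {'CONNECTION_TOKEN_ONETIME_EXPIRATION': 5 * 60}
    old_config_map = {'CONNECTION_TOKEN_ONETIME_EXPIRATION': 'CONNECTION_TOKEN_EXPIRATION'}

    def get(self, item, default=None):
        value = super().get(item)                 # stand-in for config file / env lookup
        if default is None:
            default = self.defaults.get(item)
        if value is None and item in self.old_config_map:
            # fall back to the legacy key while keeping the new key's default
            return self.get(self.old_config_map[item], default)
        return value if value is not None else default

cfg = MiniConfig({'CONNECTION_TOKEN_EXPIRATION': 600})  # only the old key is configured
print(cfg.get('CONNECTION_TOKEN_ONETIME_EXPIRATION'))   # 600, resolved via the legacy name
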
@@ -101,6 +101,19 @@ class RefererCheckMiddleware:
         return response
 
 
+class SQLCountMiddleware:
+    def __init__(self, get_response):
+        self.get_response = get_response
+        if not settings.DEBUG_DEV:
+            raise MiddlewareNotUsed
+
+    def __call__(self, request):
+        from django.db import connection
+        response = self.get_response(request)
+        response['X-JMS-SQL-COUNT'] = len(connection.queries) - 2
+        return response
+
+
 class StartMiddleware:
     def __init__(self, get_response):
         self.get_response = get_response

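SQLCountMiddleware above is only active when DEBUG_DEV is on (raising MiddlewareNotUsed in __init__ tells Django to drop the middleware from the chain) and reports the query count in an X-JMS-SQL-COUNT header; the hard-coded "- 2" presumably discounts queries issued outside the view. A rough, framework-free sketch of the same wrap-and-count idea; the fake connection object and names below are illustrative, not the project's code.

class FakeConnection:
    def __init__(self):
        self.queries = []

connection = FakeConnection()

class QueryCountMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        before = len(connection.queries)
        response = self.get_response(request)            # run the wrapped handler
        response['X-SQL-Count'] = str(len(connection.queries) - before)
        return response

def view(request):
    connection.queries.append('SELECT 1')                # pretend the view ran one query
    return {}                                            # headers as a plain dict here

print(QueryCountMiddleware(view)({})['X-SQL-Count'])     # prints: 1
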
@@ -175,13 +175,9 @@ AUTH_OAUTH2_LOGOUT_URL_NAME = "authentication:oauth2:logout"
 AUTH_TEMP_TOKEN = CONFIG.AUTH_TEMP_TOKEN
 
 # Other setting
+# This is the User Login Private Token
 TOKEN_EXPIRATION = CONFIG.TOKEN_EXPIRATION
 OTP_IN_RADIUS = CONFIG.OTP_IN_RADIUS
-# Connection token
-CONNECTION_TOKEN_EXPIRATION = CONFIG.CONNECTION_TOKEN_EXPIRATION
-if CONNECTION_TOKEN_EXPIRATION < 5 * 60:
-    # At least 5 minutes
-    CONNECTION_TOKEN_EXPIRATION = 5 * 60
 
 RBAC_BACKEND = 'rbac.backends.RBACBackend'
 AUTH_BACKEND_MODEL = 'authentication.backends.base.JMSModelBackend'

@@ -53,6 +53,8 @@ BOOTSTRAP_TOKEN = CONFIG.BOOTSTRAP_TOKEN
 DEBUG = CONFIG.DEBUG
 # SECURITY WARNING: If you run with debug turned on, more debug msg will be logged
 DEBUG_DEV = CONFIG.DEBUG_DEV
+# SECURITY WARNING: If you run ansible task with debug turned on, more debug msg will be logged
+DEBUG_ANSIBLE = CONFIG.DEBUG_ANSIBLE
 
 # Absolute url for some case, for example email link
 SITE_URL = CONFIG.SITE_URL

@@ -128,6 +130,7 @@ MIDDLEWARE = [
     'jumpserver.middleware.DemoMiddleware',
     'jumpserver.middleware.RequestMiddleware',
     'jumpserver.middleware.RefererCheckMiddleware',
+    'jumpserver.middleware.SQLCountMiddleware',
     'orgs.middleware.OrgMiddleware',
    'authentication.backends.oidc.middleware.OIDCRefreshIDTokenMiddleware',
     'authentication.backends.cas.middleware.CASMiddleware',

@@ -133,8 +133,13 @@ TICKETS_ENABLED = CONFIG.TICKETS_ENABLED
 REFERER_CHECK_ENABLED = CONFIG.REFERER_CHECK_ENABLED
 
 CONNECTION_TOKEN_ENABLED = CONFIG.CONNECTION_TOKEN_ENABLED
+# Connection token
+CONNECTION_TOKEN_ONETIME_EXPIRATION = CONFIG.CONNECTION_TOKEN_ONETIME_EXPIRATION
+if CONNECTION_TOKEN_ONETIME_EXPIRATION < 5 * 60:
+    # At least 5 minutes
+    CONNECTION_TOKEN_ONETIME_EXPIRATION = 5 * 60
 CONNECTION_TOKEN_REUSABLE = CONFIG.CONNECTION_TOKEN_REUSABLE
-CONNECTION_TOKEN_EXPIRATION_MAX = CONFIG.CONNECTION_TOKEN_EXPIRATION_MAX
+CONNECTION_TOKEN_REUSABLE_EXPIRATION = CONFIG.CONNECTION_TOKEN_REUSABLE_EXPIRATION
 
 FORGOT_PASSWORD_URL = CONFIG.FORGOT_PASSWORD_URL

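The new clamp above is equivalent to max(value, 5 * 60); a quick self-contained check, with an illustrative function name that is not part of the commit.

def clamp_onetime_expiration(seconds):
    return max(seconds, 5 * 60)

assert clamp_onetime_expiration(60) == 300      # anything below 5 minutes is raised to 5 minutes
assert clamp_onetime_expiration(3600) == 3600   # larger values pass through unchanged
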
@@ -1,9 +1,10 @@
 # -*- coding: utf-8 -*-
 #
 import os
 
 from ..const import PROJECT_DIR, CONFIG
 
-LOG_DIR = os.path.join(PROJECT_DIR, 'logs')
+LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'logs')
 JUMPSERVER_LOG_FILE = os.path.join(LOG_DIR, 'jumpserver.log')
 DRF_EXCEPTION_LOG_FILE = os.path.join(LOG_DIR, 'drf_exception.log')
 UNEXPECTED_EXCEPTION_LOG_FILE = os.path.join(LOG_DIR, 'unexpected_exception.log')

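Not part of the commit: since log files now live under data/logs instead of logs/, a deployment typically has to make sure that directory exists before file handlers open. A small sketch, assuming PROJECT_DIR is the repository root:

import os

PROJECT_DIR = os.path.abspath('.')                   # assumption for this sketch
LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'logs')  # new layout: data/logs
os.makedirs(LOG_DIR, exist_ok=True)                  # create it if missing
print(os.path.join(LOG_DIR, 'jumpserver.log'))
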
@@ -132,7 +133,6 @@ LOGGING = {
             'handlers': ['null'],
             'level': 'ERROR'
         }
-
     }
 }

Some files were not shown because too many files have changed in this diff.