mirror of https://github.com/jumpserver/jumpserver
commit 0b3a7bb020
@@ -1,21 +1,33 @@
name: Build and Push Base Image

on:
  push:
  pull_request:
    branches:
      - 'pr*'
      - 'dev'
      - 'v*'
    paths:
      - 'poetry.lock'
      - 'pyproject.toml'
      - 'Dockerfile-base'
      - poetry.lock
      - pyproject.toml
      - Dockerfile-base
      - package.json
      - go.mod
      - yarn.lock
      - pom.xml
      - install_deps.sh
      - utils/clean_site_packages.sh
    types:
      - opened
      - synchronize
      - reopened

jobs:
  build-and-push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

@@ -55,6 +67,6 @@ jobs:
          git config --global user.email 'github-actions[bot]@users.noreply.github.com'
          git add Dockerfile
          git commit -m "perf: Update Dockerfile with new base image tag"
          git push
          git push origin ${{ github.event.pull_request.head.ref }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -0,0 +1,31 @@
name: Check I18n files CompileMessages

on:
  pull_request:
    branches:
      - 'dev'
    paths:
      - 'apps/i18n/core/**/*.po'
    types:
      - opened
      - synchronize
      - reopened
jobs:
  compile-messages-check:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and check compilemessages
        uses: docker/build-push-action@v6
        with:
          platforms: linux/amd64
          push: false
          file: Dockerfile
          target: stage-build
          tags: jumpserver/core:stage-build
@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20240808_054051 AS stage-build
FROM jumpserver/core-base:20240919_024156 AS stage-build

ARG VERSION

@@ -43,12 +43,18 @@ RUN set -ex \
WORKDIR /opt/jumpserver

ARG PIP_MIRROR=https://pypi.org/simple
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections

RUN --mount=type=cache,target=/root/.cache,sharing=locked,id=core \
    --mount=type=bind,source=poetry.lock,target=poetry.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    --mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
    --mount=type=bind,source=requirements/collections.yml,target=collections.yml \
    set -ex \
    && python3 -m venv /opt/py3 \
    && pip install poetry -i ${PIP_MIRROR} \
    && poetry config virtualenvs.create false \
    && . /opt/py3/bin/activate \
    && poetry install --only main
    && poetry install --only main \
    && ansible-galaxy collection install -r collections.yml --force --ignore-certs \
    && bash clean_site_packages.sh
@@ -32,6 +32,8 @@ Access JumpServer in your browser at `http://your-jumpserver-ip/`
- Username: `admin`
- Password: `ChangeMe`

[![JumpServer Quickstart](https://github.com/user-attachments/assets/0f32f52b-9935-485e-8534-336c63389612)](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")

## Screenshots

<table style="border-collapse: collapse; border: 1px solid black;">
@@ -4,6 +4,9 @@
    ansible_python_interpreter: /opt/py3/bin/python
    db_name: "{{ jms_asset.spec_info.db_name }}"
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Test MySQL connection

@@ -13,9 +16,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: version
      register: db_info

@@ -30,9 +33,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        host: "%"

@@ -47,7 +50,7 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: version
@@ -2,6 +2,10 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Test PostgreSQL connection

@@ -11,6 +15,10 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        login_db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
      register: result
      failed_when: not result.is_available

@@ -28,6 +36,10 @@
        db: "{{ jms_asset.spec_info.db_name }}"
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
        role_attr_flags: LOGIN
      ignore_errors: true
      when: result is succeeded

@@ -39,3 +51,7 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
@@ -14,27 +14,15 @@
    - name: "Add {{ account.username }} user"
      ansible.builtin.user:
        name: "{{ account.username }}"
        shell: "{{ params.shell }}"
        home: "{{ params.home | default('/home/' + account.username, true) }}"
        groups: "{{ params.groups }}"
        uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
        shell: "{{ params.shell if params.shell | length > 0 else omit }}"
        home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
        groups: "{{ params.groups if params.groups | length > 0 else omit }}"
        append: yes
        expires: -1
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} group"
      ansible.builtin.group:
        name: "{{ account.username }}"
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} user to group"
      ansible.builtin.user:
        name: "{{ account.username }}"
        groups: "{{ params.groups }}"
      when:
        - user_info.failed
        - params.groups

    - name: "Set {{ account.username }} sudo setting"
      ansible.builtin.lineinfile:
        dest: /etc/sudoers

@@ -54,14 +42,40 @@
      ignore_errors: true
      when: account.secret_type == "password"

    - name: remove jumpserver ssh key
    - name: "Get home directory for {{ account.username }}"
      ansible.builtin.shell: "getent passwd {{ account.username }} | cut -d: -f6"
      register: home_dir
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Check if home directory exists for {{ account.username }}"
      ansible.builtin.stat:
        path: "{{ home_dir.stdout.strip() }}"
      register: home_dir_stat
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Ensure {{ account.username }} home directory exists"
      ansible.builtin.file:
        path: "{{ home_dir.stdout.strip() }}"
        state: directory
        owner: "{{ account.username }}"
        group: "{{ account.username }}"
        mode: '0750'
      when:
        - account.secret_type == "ssh_key"
        - home_dir_stat.stat.exists == false
      ignore_errors: yes

    - name: Remove jumpserver ssh key
      ansible.builtin.lineinfile:
        dest: "{{ ssh_params.dest }}"
        dest: "{{ home_dir.stdout.strip() }}/.ssh/authorized_keys"
        regexp: "{{ ssh_params.regexp }}"
        state: absent
      when:
        - account.secret_type == "ssh_key"
        - ssh_params.strategy == "set_jms"
      ignore_errors: yes

    - name: "Change {{ account.username }} SSH key"
      ansible.builtin.authorized_key:

@@ -79,7 +93,7 @@
        login_password: "{{ account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        become: "{{ account.become.ansible_become | default(False) }}"
        become_method: su
        become_user: "{{ account.become.ansible_user | default('') }}"

@@ -95,7 +109,7 @@
        login_port: "{{ jms_asset.port }}"
        login_user: "{{ account.username }}"
        login_private_key_path: "{{ account.private_key_path }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
      when: account.secret_type == "ssh_key"
      delegate_to: localhost
@@ -34,6 +34,12 @@ params:
    default: ''
    help_text: "{{ 'Params groups help text' | trans }}"

  - name: uid
    type: str
    label: "{{ 'Params uid label' | trans }}"
    default: ''
    help_text: "{{ 'Params uid help text' | trans }}"

i18n:
  AIX account change secret:
    zh: '使用 Ansible 模块 user 执行账号改密 (DES)'

@@ -60,6 +66,11 @@ i18n:
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

  Params uid help text:
    zh: '请输入用户ID'
    ja: 'ユーザーIDを入力してください'
    en: 'Please enter the user ID'

  Modify sudo label:
    zh: '修改 sudo 权限'
    ja: 'sudo 権限を変更'

@@ -75,3 +86,7 @@ i18n:
    ja: 'グループ'
    en: 'Groups'

  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'
    en: 'User ID'
@@ -14,27 +14,15 @@
    - name: "Add {{ account.username }} user"
      ansible.builtin.user:
        name: "{{ account.username }}"
        shell: "{{ params.shell }}"
        home: "{{ params.home | default('/home/' + account.username, true) }}"
        groups: "{{ params.groups }}"
        uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
        shell: "{{ params.shell if params.shell | length > 0 else omit }}"
        home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
        groups: "{{ params.groups if params.groups | length > 0 else omit }}"
        append: yes
        expires: -1
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} group"
      ansible.builtin.group:
        name: "{{ account.username }}"
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} user to group"
      ansible.builtin.user:
        name: "{{ account.username }}"
        groups: "{{ params.groups }}"
      when:
        - user_info.failed
        - params.groups

    - name: "Set {{ account.username }} sudo setting"
      ansible.builtin.lineinfile:
        dest: /etc/sudoers

@@ -54,14 +42,40 @@
      ignore_errors: true
      when: account.secret_type == "password"

    - name: remove jumpserver ssh key
    - name: "Get home directory for {{ account.username }}"
      ansible.builtin.shell: "getent passwd {{ account.username }} | cut -d: -f6"
      register: home_dir
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Check if home directory exists for {{ account.username }}"
      ansible.builtin.stat:
        path: "{{ home_dir.stdout.strip() }}"
      register: home_dir_stat
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Ensure {{ account.username }} home directory exists"
      ansible.builtin.file:
        path: "{{ home_dir.stdout.strip() }}"
        state: directory
        owner: "{{ account.username }}"
        group: "{{ account.username }}"
        mode: '0750'
      when:
        - account.secret_type == "ssh_key"
        - home_dir_stat.stat.exists == false
      ignore_errors: yes

    - name: Remove jumpserver ssh key
      ansible.builtin.lineinfile:
        dest: "{{ ssh_params.dest }}"
        dest: "{{ home_dir.stdout.strip() }}/.ssh/authorized_keys"
        regexp: "{{ ssh_params.regexp }}"
        state: absent
      when:
        - account.secret_type == "ssh_key"
        - ssh_params.strategy == "set_jms"
      ignore_errors: yes

    - name: "Change {{ account.username }} SSH key"
      ansible.builtin.authorized_key:

@@ -79,7 +93,7 @@
        login_password: "{{ account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        become: "{{ account.become.ansible_become | default(False) }}"
        become_method: su
        become_user: "{{ account.become.ansible_user | default('') }}"

@@ -95,7 +109,7 @@
        login_port: "{{ jms_asset.port }}"
        login_user: "{{ account.username }}"
        login_private_key_path: "{{ account.private_key_path }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
      when: account.secret_type == "ssh_key"
      delegate_to: localhost
@@ -36,6 +36,12 @@ params:
    default: ''
    help_text: "{{ 'Params groups help text' | trans }}"

  - name: uid
    type: str
    label: "{{ 'Params uid label' | trans }}"
    default: ''
    help_text: "{{ 'Params uid help text' | trans }}"

i18n:
  Posix account change secret:
    zh: '使用 Ansible 模块 user 执行账号改密 (SHA512)'

@@ -62,6 +68,11 @@ i18n:
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

  Params uid help text:
    zh: '请输入用户ID'
    ja: 'ユーザーIDを入力してください'
    en: 'Please enter the user ID'

  Modify sudo label:
    zh: '修改 sudo 权限'
    ja: 'sudo 権限を変更'

@@ -77,3 +88,7 @@ i18n:
    ja: 'グループ'
    en: 'Groups'

  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'
    en: 'User ID'
@@ -25,11 +25,11 @@

    - name: Verify password (pyfreerdp)
      rdp_ping:
        login_host: "{{ jms_asset.address }}"
        login_host: "{{ jms_asset.origin_address }}"
        login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'rdp') | map(attribute='port') | first }}"
        login_user: "{{ account.username }}"
        login_password: "{{ account.secret }}"
        login_secret_type: "{{ account.secret_type }}"
        login_private_key_path: "{{ account.private_key_path }}"
        gateway_args: "{{ jms_gateway | default(None) }}"
      when: account.secret_type == "password"
      delegate_to: localhost
@@ -50,9 +50,6 @@ class ChangeSecretManager(AccountBasePlaybookManager):
        kwargs['exclusive'] = 'yes' if kwargs['strategy'] == SSHKeyStrategy.set else 'no'

        if kwargs['strategy'] == SSHKeyStrategy.set_jms:
            username = account.username
            path = f'/{username}' if username == "root" else f'/home/{username}'
            kwargs['dest'] = f'{path}/.ssh/authorized_keys'
            kwargs['regexp'] = '.*{}$'.format(secret.split()[2].strip())
        return kwargs

@@ -130,6 +127,7 @@ class ChangeSecretManager(AccountBasePlaybookManager):
                recorder = ChangeSecretRecord(
                    asset=asset, account=account, execution=self.execution,
                    old_secret=account.secret, new_secret=new_secret,
                    comment=f'{account.username}@{asset.address}'
                )
                records.append(recorder)
            else:
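For context on the lines removed above: an OpenSSH public key string has the form "<type> <base64-key> <comment>", so secret.split()[2] picked out the key comment, and the generated regexp matched the authorized_keys line ending in that comment. A minimal illustration with a made-up key string (Python, not JumpServer code):

import re

secret = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA jumpserver@localhost"   # hypothetical key
comment = secret.split()[2].strip()
regexp = '.*{}$'.format(comment)            # same pattern the removed code built
line = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAA jumpserver@localhost"
print(bool(re.match(regexp, line)))         # True: this authorized_keys line would match

The dest/regexp values are no longer computed here because the playbooks in this commit resolve the home directory with getent instead of assuming /home/<username>.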
@@ -3,6 +3,9 @@
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Get info

@@ -12,9 +15,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: users
      register: db_info
@@ -2,6 +2,10 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Get info

@@ -11,6 +15,10 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        login_db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
        filter: "roles"
      register: db_info
@@ -95,12 +95,14 @@ class GatherAccountsManager(AccountBasePlaybookManager):
            return None, None

        users = User.objects.filter(id__in=recipients)
        if not users:
        if not users.exists():
            return users, None

        asset_ids = self.asset_username_mapper.keys()
        assets = Asset.objects.filter(id__in=asset_ids)
        assets = Asset.objects.filter(id__in=asset_ids).prefetch_related('accounts')
        gather_accounts = GatheredAccount.objects.filter(asset_id__in=asset_ids, present=True)

        asset_id_map = {str(asset.id): asset for asset in assets}
        asset_id_username = list(assets.values_list('id', 'accounts__username'))
        asset_id_username.extend(list(gather_accounts.values_list('asset_id', 'username')))

@@ -109,26 +111,24 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        for asset_id, username in asset_id_username:
            system_asset_username_mapper[str(asset_id)].add(username)

        change_info = {}
        change_info = defaultdict(dict)
        for asset_id, usernames in self.asset_username_mapper.items():
            system_usernames = system_asset_username_mapper.get(asset_id)

            if not system_usernames:
                continue

            add_usernames = usernames - system_usernames
            remove_usernames = system_usernames - usernames
            k = f'{asset_id_map[asset_id]}[{asset_id}]'

            if not add_usernames and not remove_usernames:
                continue

            change_info[k] = {
                'add_usernames': ', '.join(add_usernames),
                'remove_usernames': ', '.join(remove_usernames),
            change_info[str(asset_id_map[asset_id])] = {
                'add_usernames': add_usernames,
                'remove_usernames': remove_usernames
            }

        return users, change_info
        return users, dict(change_info)

    @staticmethod
    def send_email_if_need(users, change_info):
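A small worked example of the add/remove computation in the hunk above, using throwaway data in place of the real gathered accounts:

from collections import defaultdict

asset_username_mapper = {'asset-1': {'root', 'ops', 'deploy'}}          # usernames found on the asset
system_asset_username_mapper = {'asset-1': {'root', 'ops', 'olduser'}}  # usernames already known to the system

change_info = defaultdict(dict)
for asset_id, usernames in asset_username_mapper.items():
    system_usernames = system_asset_username_mapper.get(asset_id)
    if not system_usernames:
        continue
    add_usernames = usernames - system_usernames       # on the asset, not yet recorded
    remove_usernames = system_usernames - usernames    # recorded, but gone from the asset
    if add_usernames or remove_usernames:
        change_info[asset_id] = {'add_usernames': add_usernames, 'remove_usernames': remove_usernames}

print(dict(change_info))
# {'asset-1': {'add_usernames': {'deploy'}, 'remove_usernames': {'olduser'}}}

Returning raw sets instead of pre-joined strings matches the email template later in this commit, which now applies join:" " itself.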
@@ -4,6 +4,9 @@
    ansible_python_interpreter: /opt/py3/bin/python
    db_name: "{{ jms_asset.spec_info.db_name }}"
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Test MySQL connection

@@ -13,9 +16,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: version
      register: db_info

@@ -30,9 +33,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        host: "%"

@@ -47,7 +50,7 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: version
@@ -2,6 +2,10 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Test PostgreSQL connection

@@ -11,6 +15,10 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        login_db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
      register: result
      failed_when: not result.is_available

@@ -28,6 +36,10 @@
        db: "{{ jms_asset.spec_info.db_name }}"
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
        role_attr_flags: LOGIN
      ignore_errors: true
      when: result is succeeded

@@ -40,6 +52,10 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
      when:
        - result is succeeded
        - change_info is succeeded
@@ -14,27 +14,15 @@
    - name: "Add {{ account.username }} user"
      ansible.builtin.user:
        name: "{{ account.username }}"
        shell: "{{ params.shell }}"
        home: "{{ params.home | default('/home/' + account.username, true) }}"
        groups: "{{ params.groups }}"
        uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
        shell: "{{ params.shell if params.shell | length > 0 else omit }}"
        home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
        groups: "{{ params.groups if params.groups | length > 0 else omit }}"
        append: yes
        expires: -1
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} group"
      ansible.builtin.group:
        name: "{{ account.username }}"
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} user to group"
      ansible.builtin.user:
        name: "{{ account.username }}"
        groups: "{{ params.groups }}"
      when:
        - user_info.failed
        - params.groups

    - name: "Set {{ account.username }} sudo setting"
      ansible.builtin.lineinfile:
        dest: /etc/sudoers

@@ -54,14 +42,40 @@
      ignore_errors: true
      when: account.secret_type == "password"

    - name: remove jumpserver ssh key
    - name: "Get home directory for {{ account.username }}"
      ansible.builtin.shell: "getent passwd {{ account.username }} | cut -d: -f6"
      register: home_dir
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Check if home directory exists for {{ account.username }}"
      ansible.builtin.stat:
        path: "{{ home_dir.stdout.strip() }}"
      register: home_dir_stat
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Ensure {{ account.username }} home directory exists"
      ansible.builtin.file:
        path: "{{ home_dir.stdout.strip() }}"
        state: directory
        owner: "{{ account.username }}"
        group: "{{ account.username }}"
        mode: '0750'
      when:
        - account.secret_type == "ssh_key"
        - home_dir_stat.stat.exists == false
      ignore_errors: yes

    - name: Remove jumpserver ssh key
      ansible.builtin.lineinfile:
        dest: "{{ ssh_params.dest }}"
        dest: "{{ home_dir.stdout.strip() }}/.ssh/authorized_keys"
        regexp: "{{ ssh_params.regexp }}"
        state: absent
      when:
        - account.secret_type == "ssh_key"
        - ssh_params.strategy == "set_jms"
      ignore_errors: yes

    - name: "Change {{ account.username }} SSH key"
      ansible.builtin.authorized_key:

@@ -79,7 +93,7 @@
        login_password: "{{ account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        become: "{{ account.become.ansible_become | default(False) }}"
        become_method: su
        become_user: "{{ account.become.ansible_user | default('') }}"

@@ -95,7 +109,7 @@
        login_port: "{{ jms_asset.port }}"
        login_user: "{{ account.username }}"
        login_private_key_path: "{{ account.private_key_path }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
      when: account.secret_type == "ssh_key"
      delegate_to: localhost
@@ -34,6 +34,12 @@ params:
    default: ''
    help_text: "{{ 'Params groups help text' | trans }}"

  - name: uid
    type: str
    label: "{{ 'Params uid label' | trans }}"
    default: ''
    help_text: "{{ 'Params uid help text' | trans }}"

i18n:
  Aix account push:
    zh: '使用 Ansible 模块 user 执行 Aix 账号推送 (DES)'

@@ -60,6 +66,11 @@ i18n:
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

  Params uid help text:
    zh: '请输入用户ID'
    ja: 'ユーザーIDを入力してください'
    en: 'Please enter the user ID'

  Modify sudo label:
    zh: '修改 sudo 权限'
    ja: 'sudo 権限を変更'

@@ -75,3 +86,7 @@ i18n:
    ja: 'グループ'
    en: 'Groups'

  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'
    en: 'User ID'
@@ -14,27 +14,15 @@
    - name: "Add {{ account.username }} user"
      ansible.builtin.user:
        name: "{{ account.username }}"
        shell: "{{ params.shell }}"
        home: "{{ params.home | default('/home/' + account.username, true) }}"
        groups: "{{ params.groups }}"
        uid: "{{ params.uid | int if params.uid | length > 0 else omit }}"
        shell: "{{ params.shell if params.shell | length > 0 else omit }}"
        home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
        groups: "{{ params.groups if params.groups | length > 0 else omit }}"
        append: yes
        expires: -1
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} group"
      ansible.builtin.group:
        name: "{{ account.username }}"
        state: present
      when: user_info.failed

    - name: "Add {{ account.username }} user to group"
      ansible.builtin.user:
        name: "{{ account.username }}"
        groups: "{{ params.groups }}"
      when:
        - user_info.failed
        - params.groups

    - name: "Set {{ account.username }} sudo setting"
      ansible.builtin.lineinfile:
        dest: /etc/sudoers

@@ -54,14 +42,40 @@
      ignore_errors: true
      when: account.secret_type == "password"

    - name: remove jumpserver ssh key
    - name: "Get home directory for {{ account.username }}"
      ansible.builtin.shell: "getent passwd {{ account.username }} | cut -d: -f6"
      register: home_dir
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Check if home directory exists for {{ account.username }}"
      ansible.builtin.stat:
        path: "{{ home_dir.stdout.strip() }}"
      register: home_dir_stat
      when: account.secret_type == "ssh_key"
      ignore_errors: yes

    - name: "Ensure {{ account.username }} home directory exists"
      ansible.builtin.file:
        path: "{{ home_dir.stdout.strip() }}"
        state: directory
        owner: "{{ account.username }}"
        group: "{{ account.username }}"
        mode: '0750'
      when:
        - account.secret_type == "ssh_key"
        - home_dir_stat.stat.exists == false
      ignore_errors: yes

    - name: Remove jumpserver ssh key
      ansible.builtin.lineinfile:
        dest: "{{ ssh_params.dest }}"
        dest: "{{ home_dir.stdout.strip() }}/.ssh/authorized_keys"
        regexp: "{{ ssh_params.regexp }}"
        state: absent
      when:
        - account.secret_type == "ssh_key"
        - ssh_params.strategy == "set_jms"
      ignore_errors: yes

    - name: "Change {{ account.username }} SSH key"
      ansible.builtin.authorized_key:

@@ -79,7 +93,7 @@
        login_password: "{{ account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        become: "{{ account.become.ansible_become | default(False) }}"
        become_method: su
        become_user: "{{ account.become.ansible_user | default('') }}"

@@ -95,7 +109,7 @@
        login_port: "{{ jms_asset.port }}"
        login_user: "{{ account.username }}"
        login_private_key_path: "{{ account.private_key_path }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default('') }}"
        gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
        old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
      when: account.secret_type == "ssh_key"
      delegate_to: localhost
@@ -36,6 +36,12 @@ params:
    default: ''
    help_text: "{{ 'Params groups help text' | trans }}"

  - name: uid
    type: str
    label: "{{ 'Params uid label' | trans }}"
    default: ''
    help_text: "{{ 'Params uid help text' | trans }}"

i18n:
  Posix account push:
    zh: '使用 Ansible 模块 user 执行账号推送 (sha512)'

@@ -62,6 +68,11 @@ i18n:
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

  Params uid help text:
    zh: '请输入用户ID'
    ja: 'ユーザーIDを入力してください'
    en: 'Please enter the user ID'

  Modify sudo label:
    zh: '修改 sudo 权限'
    ja: 'sudo 権限を変更'

@@ -75,4 +86,9 @@ i18n:
  Params groups label:
    zh: '用户组'
    ja: 'グループ'
    en: 'Groups'
    en: 'Groups'

  Params uid label:
    zh: '用户ID'
    ja: 'ユーザーID'
    en: 'User ID'
@@ -25,11 +25,11 @@

    - name: Verify password (pyfreerdp)
      rdp_ping:
        login_host: "{{ jms_asset.address }}"
        login_host: "{{ jms_asset.origin_address }}"
        login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'rdp') | map(attribute='port') | first }}"
        login_user: "{{ account.username }}"
        login_password: "{{ account.secret }}"
        login_secret_type: "{{ account.secret_type }}"
        login_private_key_path: "{{ account.private_key_path }}"
        gateway_args: "{{ jms_gateway | default(None) }}"
      when: account.secret_type == "password"
      delegate_to: localhost
@@ -3,6 +3,9 @@
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: "Remove account"

@@ -12,8 +15,8 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        name: "{{ account.username }}"
        state: absent
@@ -2,6 +2,10 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: "Remove account"

@@ -12,4 +16,8 @@
        login_port: "{{ jms_asset.port }}"
        db: "{{ jms_asset.spec_info.db_name }}"
        name: "{{ account.username }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
        state: absent
@@ -13,4 +13,3 @@
        login_user: "{{ account.username }}"
        login_password: "{{ account.secret }}"
        login_secret_type: "{{ account.secret_type }}"
        login_private_key_path: "{{ account.private_key_path }}"
@@ -3,6 +3,9 @@
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Verify account

@@ -12,7 +15,7 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: version
@@ -2,6 +2,10 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Verify account

@@ -11,5 +15,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        db: "{{ jms_asset.spec_info.db_name }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
      register: result
      failed_when: not result.is_available
@@ -0,0 +1,30 @@
# Generated by Django 4.1.13 on 2024-08-26 09:05

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0005_myasset'),
        ('accounts', '0003_automation'),
    ]

    operations = [
        migrations.AlterField(
            model_name='changesecretrecord',
            name='account',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.account'),
        ),
        migrations.AlterField(
            model_name='changesecretrecord',
            name='asset',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='assets.asset'),
        ),
        migrations.AlterField(
            model_name='changesecretrecord',
            name='execution',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.automationexecution'),
        ),
    ]
@@ -33,16 +33,15 @@ class ChangeSecretAutomation(ChangeSecretMixin, AccountBaseAutomation):


class ChangeSecretRecord(JMSBaseModel):
    execution = models.ForeignKey('accounts.AutomationExecution', on_delete=models.CASCADE)
    asset = models.ForeignKey('assets.Asset', on_delete=models.CASCADE, null=True)
    account = models.ForeignKey('accounts.Account', on_delete=models.CASCADE, null=True)
    execution = models.ForeignKey('accounts.AutomationExecution', on_delete=models.SET_NULL, null=True)
    asset = models.ForeignKey('assets.Asset', on_delete=models.SET_NULL, null=True)
    account = models.ForeignKey('accounts.Account', on_delete=models.SET_NULL, null=True)
    old_secret = fields.EncryptTextField(blank=True, null=True, verbose_name=_('Old secret'))
    new_secret = fields.EncryptTextField(blank=True, null=True, verbose_name=_('New secret'))
    date_started = models.DateTimeField(blank=True, null=True, verbose_name=_('Date started'))
    date_finished = models.DateTimeField(blank=True, null=True, verbose_name=_('Date finished'))
    status = models.CharField(
        max_length=16, verbose_name=_('Status'),
        default=ChangeSecretRecordStatusChoice.pending.value
        max_length=16, verbose_name=_('Status'), default=ChangeSecretRecordStatusChoice.pending.value
    )
    error = models.TextField(blank=True, null=True, verbose_name=_('Error'))

@@ -51,4 +50,4 @@ class ChangeSecretRecord(JMSBaseModel):
        verbose_name = _("Change secret record")

    def __str__(self):
        return self.account.__str__()
        return f'{self.account.username}@{self.asset}'
@@ -178,7 +178,7 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
            instance.save()
            return instance, 'updated'
        else:
            raise serializers.ValidationError('Account already exists')
            raise serializers.ValidationError(_('Account already exists'))

    def create(self, validated_data):
        push_now = validated_data.pop('push_now', None)

@@ -247,6 +247,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
            'name': {'required': False},
            'source_id': {'required': False, 'allow_null': True},
        }
        fields_unimport_template = ['params']

    @classmethod
    def setup_eager_loading(cls, queryset):
@@ -19,6 +19,16 @@ class PasswordRulesSerializer(serializers.Serializer):
        default='', allow_blank=True, max_length=16, label=_('Exclude symbol')
    )

    @staticmethod
    def get_render_help_text():
        return _("""length is the length of the password, and the range is 8 to 30.
        lowercase indicates whether the password contains lowercase letters,
        uppercase indicates whether it contains uppercase letters,
        digit indicates whether it contains numbers, and symbol indicates whether it contains special symbols.
        exclude_symbols is used to exclude specific symbols. You can fill in the symbol characters to be excluded (up to 16).
        If you do not need to exclude symbols, you can leave it blank.
        default: {"length": 16, "lowercase": true, "uppercase": true, "digit": true, "symbol": true, "exclude_symbols": ""}""")


class AccountTemplateSerializer(BaseAccountSerializer):
    password_rules = PasswordRulesSerializer(required=False, label=_('Password rules'))

@@ -46,6 +56,7 @@ class AccountTemplateSerializer(BaseAccountSerializer):
                'required': False
            },
        }
        fields_unimport_template = ['push_params']

    @staticmethod
    def generate_secret(attrs):
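The help text added above documents the password_rules JSON shape. Purely as an illustration of those fields (this is not JumpServer's actual generator, which lives elsewhere in the codebase), a rules dict like the documented default could drive a generator along these lines:

import secrets
import string

def generate_password(rules: dict) -> str:
    # Clamp length to the documented 8-30 range.
    length = max(8, min(30, int(rules.get('length', 16))))
    pools = []
    if rules.get('lowercase', True):
        pools.append(string.ascii_lowercase)
    if rules.get('uppercase', True):
        pools.append(string.ascii_uppercase)
    if rules.get('digit', True):
        pools.append(string.digits)
    if rules.get('symbol', True):
        excluded = set(rules.get('exclude_symbols', ''))
        pools.append(''.join(c for c in string.punctuation if c not in excluded))
    alphabet = ''.join(pools) or string.ascii_letters
    chars = [secrets.choice(pool) for pool in pools]               # at least one char per enabled class
    chars += [secrets.choice(alphabet) for _ in range(length - len(chars))]
    secrets.SystemRandom().shuffle(chars)
    return ''.join(chars)

print(generate_password({'length': 16, 'lowercase': True, 'uppercase': True,
                         'digit': True, 'symbol': True, 'exclude_symbols': ''}))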
@@ -1,9 +1,15 @@
import datetime

from celery import shared_task
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _, gettext_noop

from accounts.const import AutomationTypes
from accounts.tasks.common import quickstart_automation_by_snapshot
from common.utils import get_logger, get_object_or_none
from common.const.crontab import CRONTAB_AT_AM_THREE
from common.utils import get_logger, get_object_or_none, get_log_keep_day
from ops.celery.decorator import register_as_period_task
from orgs.utils import tmp_to_org, tmp_to_root_org

logger = get_logger(__file__)

@@ -22,8 +28,14 @@ def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):


@shared_task(
    queue='ansible', verbose_name=_('Account execute automation'),
    activity_callback=task_activity_callback
    queue='ansible',
    verbose_name=_('Account execute automation'),
    activity_callback=task_activity_callback,
    description=_(
        """Unified execution entry for account automation tasks: when the system performs tasks
        such as account push, password change, account verification, account collection,
        and gateway account verification, all tasks are executed through this unified entry"""
    )
)
def execute_account_automation_task(pid, trigger, tp):
    model = AutomationTypes.get_type_model(tp)

@@ -48,8 +60,12 @@ def record_task_activity_callback(self, record_ids, *args, **kwargs):


@shared_task(
    queue='ansible', verbose_name=_('Execute automation record'),
    activity_callback=record_task_activity_callback
    queue='ansible',
    verbose_name=_('Execute automation record'),
    activity_callback=record_task_activity_callback,
    description=_(
        """When manually executing password change records, this task is used"""
    )
)
def execute_automation_record_task(record_ids, tp):
    from accounts.models import ChangeSecretRecord

@@ -74,3 +90,33 @@ def execute_automation_record_task(record_ids, tp):
    }
    with tmp_to_org(record.execution.org_id):
        quickstart_automation_by_snapshot(task_name, tp, task_snapshot)


@shared_task(
    verbose_name=_('Clean change secret and push record period'),
    description=_(
        """The system will periodically clean up unnecessary password change and push records,
        including their associated change tasks, execution logs, assets, and accounts. When any
        of these associated items are deleted, the corresponding password change and push records
        become invalid. Therefore, to maintain a clean and efficient database, the system will
        clean up expired records at 2 a.m daily, based on the interval specified by
        PERM_EXPIRED_CHECK_PERIODIC in the config.txt configuration file. This periodic cleanup
        mechanism helps free up storage space and enhances the security and overall performance
        of data management"""
    )
)
@register_as_period_task(crontab=CRONTAB_AT_AM_THREE)
def clean_change_secret_and_push_record_period():
    from accounts.models import ChangeSecretRecord
    print('Start clean change secret and push record period')
    with tmp_to_root_org():
        now = timezone.now()
        days = get_log_keep_day('ACCOUNT_CHANGE_SECRET_RECORD_KEEP_DAYS')
        expired_day = now - datetime.timedelta(days=days)
        records = ChangeSecretRecord.objects.filter(
            date_updated__lt=expired_day
        ).filter(
            Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
        )

        records.delete()
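Spelled out, the cleanup task above deletes a record only when it is both older than the retention window and orphaned, i.e. its execution, asset, or account foreign key has been nulled out by the SET_NULL change earlier in this commit. A plain-Python restatement of that criterion, illustrative only:

import datetime

def should_clean(record: dict, now: datetime.datetime, keep_days: int) -> bool:
    # Older than the keep window...
    expired_day = now - datetime.timedelta(days=keep_days)
    # ...and at least one related object has been deleted (FK set to NULL).
    orphaned = record['execution'] is None or record['asset'] is None or record['account'] is None
    return record['date_updated'] < expired_day and orphaned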
@@ -22,7 +22,13 @@ def task_activity_callback(self, pid, trigger, *args, **kwargs):
    return resource_ids, org_id


@shared_task(verbose_name=_('Execute account backup plan'), activity_callback=task_activity_callback)
@shared_task(
    verbose_name=_('Execute account backup plan'),
    activity_callback=task_activity_callback,
    description=_(
        "When performing scheduled or manual account backups, this task is used"
    )
)
def execute_account_backup_task(pid, trigger, **kwargs):
    from accounts.models import AccountBackupAutomation
    with tmp_to_root_org():
@@ -26,8 +26,10 @@ def gather_asset_accounts_util(nodes, task_name):


@shared_task(
    queue="ansible", verbose_name=_('Gather asset accounts'),
    activity_callback=lambda self, node_ids, task_name=None, *args, **kwargs: (node_ids, None)
    queue="ansible",
    verbose_name=_('Gather asset accounts'),
    activity_callback=lambda self, node_ids, task_name=None, *args, **kwargs: (node_ids, None),
    description=_("Unused")
)
def gather_asset_accounts_task(node_ids, task_name=None):
    if task_name is None:
@@ -12,8 +12,12 @@ __all__ = [


@shared_task(
    queue="ansible", verbose_name=_('Push accounts to assets'),
    activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None)
    queue="ansible",
    verbose_name=_('Push accounts to assets'),
    activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None),
    description=_(
        "When creating or modifying an account requires account push, this task is executed"
    )
)
def push_accounts_to_assets_task(account_ids, params=None):
    from accounts.models import PushAccountAutomation
@@ -21,8 +21,13 @@ __all__ = ['remove_accounts_task']


@shared_task(
    queue="ansible", verbose_name=_('Remove account'),
    activity_callback=lambda self, gather_account_ids, *args, **kwargs: (gather_account_ids, None)
    queue="ansible",
    verbose_name=_('Remove account'),
    activity_callback=lambda self, gather_account_ids, *args, **kwargs: (gather_account_ids, None),
    description=_(
        """When clicking "Sync deletion" in 'Console - Gather Account - Gathered accounts' this
        task will be executed"""
    )
)
def remove_accounts_task(gather_account_ids):
    from accounts.models import GatheredAccount

@@ -41,7 +46,15 @@ def remove_accounts_task(gather_account_ids):
        quickstart_automation_by_snapshot(task_name, tp, task_snapshot)


@shared_task(verbose_name=_('Clean historical accounts'))
@shared_task(
    verbose_name=_('Clean historical accounts'),
    description=_(
        """Each time an asset account is updated, a historical account is generated, so it is
        necessary to clean up the asset account history. The system will clean up excess account
        records at 2 a.m. daily based on the configuration in the "System settings - Features -
        Account storage - Record limit"""
    )
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
@tmp_to_root_org()
def clean_historical_accounts():
@@ -9,7 +9,11 @@ from orgs.utils import tmp_to_root_org, tmp_to_org

@shared_task(
    verbose_name=_('Template sync info to related accounts'),
    activity_callback=lambda self, template_id, *args, **kwargs: (template_id, None)
    activity_callback=lambda self, template_id, *args, **kwargs: (template_id, None),
    description=_(
        """When clicking 'Sync new secret to accounts' in 'Console - Account - Templates -
        Accounts' this task will be executed"""
    )
)
def template_sync_related_accounts(template_id, user_id=None):
    from accounts.models import Account, AccountTemplate
@@ -28,7 +28,12 @@ def sync_instance(instance):
    return "succeeded", msg


@shared_task(verbose_name=_('Sync secret to vault'))
@shared_task(
    verbose_name=_('Sync secret to vault'),
    description=_(
        "When clicking 'Sync' in 'System Settings - Features - Account Storage' this task will be executed"
    )
)
def sync_secret_to_vault():
    if not vault_client.enabled:
        # 这里不能判断 settings.VAULT_ENABLED, 必须判断当前 vault_client 的类型
@ -4,7 +4,6 @@ from django.utils.translation import gettext_noop

from accounts.const import AutomationTypes
from accounts.tasks.common import quickstart_automation_by_snapshot
from assets.const import GATEWAY_NAME
from common.utils import get_logger
from orgs.utils import org_aware_func

@ -32,13 +31,13 @@ def verify_accounts_connectivity_util(accounts, task_name):
    asset_ids = [a.asset_id for a in accounts]
    assets = Asset.objects.filter(id__in=asset_ids)

    gateways = assets.filter(platform__name=GATEWAY_NAME)
    gateways = assets.gateways()
    verify_connectivity_util(
        gateways, AutomationTypes.verify_gateway_account,
        accounts, task_name
    )

    common_assets = assets.exclude(platform__name=GATEWAY_NAME)
    common_assets = assets.gateways(0)
    verify_connectivity_util(
        common_assets, AutomationTypes.verify_account,
        accounts, task_name

@ -46,8 +45,12 @@ def verify_accounts_connectivity_util(accounts, task_name):


@shared_task(
    queue="ansible", verbose_name=_('Verify asset account availability'),
    activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None)
    queue="ansible",
    verbose_name=_('Verify asset account availability'),
    activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None),
    description=_(
        "When clicking 'Test' in 'Console - Asset details - Accounts' this task will be executed"
    )
)
def verify_accounts_connectivity_task(account_ids):
    from accounts.models import Account, VerifyAccountAutomation

@ -1,18 +1,29 @@
{% load i18n %}

<h3>{% trans 'Gather account change information' %}</h3>
<table style="width: 100%; border-collapse: collapse; max-width: 100%; text-align: left; margin-top: 20px;">
<h3></h3>
<table style="width: 100%; border-collapse: collapse; table-layout: fixed; text-align: left; margin-top: 20px;">
    <caption></caption>
    <tr style="background-color: #f2f2f2;">
        <th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Asset' %}</th>
        <th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Added account' %}</th>
        <th style="border: 1px solid #ddd; padding: 10px;">{% trans 'Deleted account' %}</th>
        <th style="border: 1px solid #ddd; padding: 15px; text-align: left; vertical-align: top; line-height: 1.5;">
            {% trans 'Asset' %}
        </th>
        <th style="border: 1px solid #ddd; padding: 15px; text-align: left; vertical-align: top; line-height: 1.5;">
            {% trans 'Added account' %}
        </th>
        <th style="border: 1px solid #ddd; padding: 15px; text-align: left; vertical-align: top; line-height: 1.5;">
            {% trans 'Deleted account' %}
        </th>
    </tr>
    {% for name, change in change_info.items %}
        <tr style="{% cycle 'background-color: #ebf5ff;' 'background-color: #fff;' %}">
            <td style="border: 1px solid #ddd; padding: 10px;">{{ name }}</td>
            <td style="border: 1px solid #ddd; padding: 10px;">{{ change.add_usernames }}</td>
            <td style="border: 1px solid #ddd; padding: 10px;">{{ change.remove_usernames }}</td>
            <td style="border: 1px solid #ddd; padding: 10px; text-align: left; vertical-align: top;">
                {{ name | safe }}
            </td>
            <td style="border: 1px solid #ddd; padding: 10px; text-align: left; vertical-align: top;">
                {{ change.add_usernames | join:" " | safe }}
            </td>
            <td style="border: 1px solid #ddd; padding: 10px; text-align: left; vertical-align: top;">
                {{ change.remove_usernames | join:" " | safe }}
            </td>
        </tr>
    {% endfor %}
</table>

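For readability, a minimal sketch (not taken from the source) of the context this notification template appears to expect; the template path and values below are illustrative assumptions:

    # Each asset name maps to the usernames added/removed, which the reworked
    # template joins with spaces in the two right-hand columns.
    from django.template.loader import render_to_string

    change_info = {
        "web-server-01": {
            "add_usernames": ["deploy", "ops"],   # 'Added account' column
            "remove_usernames": ["olduser"],      # 'Deleted account' column
        },
    }
    html = render_to_string(
        "accounts/gather_account_change_info.html",  # path is hypothetical
        {"change_info": change_info},
    )
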
@ -8,3 +8,4 @@ class ActionChoices(models.TextChoices):
    review = 'review', _('Review')
    warning = 'warning', _('Warn')
    notice = 'notice', _('Notify')
    notify_and_warn = 'notify_and_warn', _('Notify and warn')

@ -62,7 +62,7 @@ class ActionAclSerializer(serializers.Serializer):
        self.set_action_choices()

    class Meta:
        action_choices_exclude = [ActionChoices.warning]
        action_choices_exclude = [ActionChoices.warning, ActionChoices.notify_and_warn]

    def set_action_choices(self):
        field_action = self.fields.get("action")

@ -2,10 +2,10 @@
#
from collections import defaultdict

import django_filters
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext as _
from django_filters import rest_framework as drf_filters
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response

@ -22,6 +22,7 @@ from common.drf.filters import BaseFilterSet, AttrRulesFilterBackend
from common.utils import get_logger, is_uuid
from orgs.mixins import generics
from orgs.mixins.api import OrgBulkModelViewSet
from ...const import GATEWAY_NAME
from ...notifications import BulkUpdatePlatformSkipAssetUserMsg

logger = get_logger(__file__)

@ -32,31 +33,32 @@ __all__ = [


class AssetFilterSet(BaseFilterSet):
    platform = django_filters.CharFilter(method='filter_platform')
    exclude_platform = django_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
    domain = django_filters.CharFilter(method='filter_domain')
    type = django_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
    category = django_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
    protocols = django_filters.CharFilter(method='filter_protocols')
    domain_enabled = django_filters.BooleanFilter(
    platform = drf_filters.CharFilter(method='filter_platform')
    is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
    exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
    domain = drf_filters.CharFilter(method='filter_domain')
    type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
    category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
    protocols = drf_filters.CharFilter(method='filter_protocols')
    domain_enabled = drf_filters.BooleanFilter(
        field_name="platform__domain_enabled", lookup_expr="exact"
    )
    ping_enabled = django_filters.BooleanFilter(
    ping_enabled = drf_filters.BooleanFilter(
        field_name="platform__automation__ping_enabled", lookup_expr="exact"
    )
    gather_facts_enabled = django_filters.BooleanFilter(
    gather_facts_enabled = drf_filters.BooleanFilter(
        field_name="platform__automation__gather_facts_enabled", lookup_expr="exact"
    )
    change_secret_enabled = django_filters.BooleanFilter(
    change_secret_enabled = drf_filters.BooleanFilter(
        field_name="platform__automation__change_secret_enabled", lookup_expr="exact"
    )
    push_account_enabled = django_filters.BooleanFilter(
    push_account_enabled = drf_filters.BooleanFilter(
        field_name="platform__automation__push_account_enabled", lookup_expr="exact"
    )
    verify_account_enabled = django_filters.BooleanFilter(
    verify_account_enabled = drf_filters.BooleanFilter(
        field_name="platform__automation__verify_account_enabled", lookup_expr="exact"
    )
    gather_accounts_enabled = django_filters.BooleanFilter(
    gather_accounts_enabled = drf_filters.BooleanFilter(
        field_name="platform__automation__gather_accounts_enabled", lookup_expr="exact"
    )

@ -71,9 +73,16 @@ class AssetFilterSet(BaseFilterSet):
    def filter_platform(queryset, name, value):
        if value.isdigit():
            return queryset.filter(platform_id=value)
        elif value == GATEWAY_NAME:
            return queryset.filter(platform__name__istartswith=GATEWAY_NAME)
        else:
            return queryset.filter(platform__name=value)

    @staticmethod
    def filter_is_gateway(queryset, name, value):
        queryset = queryset.gateways(value)
        return queryset

    @staticmethod
    def filter_domain(queryset, name, value):
        if is_uuid(value):

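As the hunk above shows, the new is_gateway filter simply delegates to the queryset's gateways() helper. A hedged sketch of exercising it directly through django-filter; the data values are illustrative:

    # Assumes the AssetFilterSet and Asset model from the hunks above.
    fs = AssetFilterSet(data={"is_gateway": "true"}, queryset=Asset.objects.all())
    gateway_assets = fs.qs       # resolves to queryset.gateways(True)
    fs = AssetFilterSet(data={"is_gateway": "false"}, queryset=Asset.objects.all())
    ordinary_assets = fs.qs      # resolves to queryset.gateways(False)
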
@ -298,6 +307,7 @@ class AssetsTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
    def check_permissions(self, request):
        action_perm_require = {
            "refresh": "assets.refresh_assethardwareinfo",
            "test": "assets.test_assetconnectivity",
        }
        _action = request.data.get("action")
        perm_required = action_perm_require.get(_action)

@ -1,4 +1,5 @@
from django.db.models import Count
from django_filters import rest_framework as filters
from rest_framework import generics
from rest_framework import serializers
from rest_framework.decorators import action

@ -14,6 +15,14 @@ from common.serializers import GroupedChoiceSerializer
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']


class PlatformFilter(filters.FilterSet):
    name__startswith = filters.CharFilter(field_name='name', lookup_expr='istartswith')

    class Meta:
        model = Platform
        fields = ['name', 'category', 'type']


class AssetPlatformViewSet(JMSModelViewSet):
    queryset = Platform.objects.all()
    serializer_classes = {

@ -21,7 +30,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
        'list': PlatformListSerializer,
        'categories': GroupedChoiceSerializer,
    }
    filterset_fields = ['name', 'category', 'type']
    filterset_class = PlatformFilter
    search_fields = ['name']
    ordering = ['-internal', 'name']
    rbac_perms = {

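A hedged sketch of the prefix filtering the new PlatformFilter enables on this viewset; the query value is illustrative:

    # Case-insensitive prefix match on Platform.name via the new filter class.
    fs = PlatformFilter(data={"name__startswith": "Gateway"}, queryset=Platform.objects.all())
    gateway_platforms = fs.qs
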
@ -170,6 +170,7 @@ class BasePlaybookManager:
                result = self.write_cert_to_file(
                    os.path.join(cert_dir, f), specific.get(f)
                )
                os.chmod(result, 0o600)
                host['jms_asset']['secret_info'][f] = result
        return host

@ -3,6 +3,9 @@
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

  tasks:
    - name: Get info

@ -12,9 +15,9 @@
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
        client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
        client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
        filter: version
      register: db_info

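The playbook change above pre-computes ca_cert/ssl_cert/ssl_key vars and only passes them to the module when SSL checking is on and the value is non-empty. A purely illustrative Python restatement of that rule, not part of the source:

    def ssl_param(value: str, check_ssl: bool):
        # Mirrors "{{ value if check_ssl and value | length > 0 else omit }}";
        # returning None here stands in for Ansible's special `omit` placeholder.
        return value if check_ssl and value else None
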
@ -2,6 +2,10 @@
|
|||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
|
||||
tasks:
|
||||
- name: Get info
|
||||
|
@ -11,6 +15,10 @@
|
|||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_db: "{{ jms_asset.spec_info.db_name }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
||||
register: db_info
|
||||
|
||||
- name: Define info by set_fact
|
||||
|
|
|
@ -13,4 +13,3 @@
|
|||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_secret_type: "{{ jms_account.secret_type }}"
|
||||
login_private_key_path: "{{ jms_account.private_key_path }}"
|
||||
|
|
|
@ -3,6 +3,9 @@
|
|||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
|
||||
tasks:
|
||||
- name: Test MySQL connection
|
||||
|
@ -12,7 +15,7 @@
|
|||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
filter: version
|
||||
|
|
|
@ -2,6 +2,10 @@
|
|||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
|
||||
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
|
||||
tasks:
|
||||
- name: Test PostgreSQL connection
|
||||
|
@ -11,5 +15,9 @@
|
|||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_db: "{{ jms_asset.spec_info.db_name }}"
|
||||
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
|
||||
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
|
||||
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
||||
register: result
|
||||
failed_when: not result.is_available
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from .automation import *
|
||||
from .base import *
|
||||
from .category import *
|
||||
from .database import *
|
||||
from .host import *
|
||||
from .platform import *
|
||||
from .protocol import *
|
||||
|
|
|
@ -1,3 +1,5 @@
from django.db.models import TextChoices

from .base import BaseType


@ -120,3 +122,10 @@ class DatabaseTypes(BaseType):
            cls.MYSQL, cls.MARIADB, cls.POSTGRESQL,
            cls.MONGODB, cls.REDIS,
        ]


class PostgresqlSSLMode(TextChoices):
    PREFER = 'prefer', 'Prefer'
    REQUIRE = 'require', 'Require'
    VERIFY_CA = 'verify-ca', 'Verify CA'
    VERIFY_FULL = 'verify-full', 'Verify Full'

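A hedged usage sketch of the new enum; the values and labels come from the class above:

    mode = PostgresqlSSLMode.VERIFY_FULL
    print(mode.value, mode.label)         # verify-full Verify Full
    print(PostgresqlSSLMode.choices[0])   # ('prefer', 'Prefer')
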
@ -45,6 +45,12 @@ class Protocol(ChoicesMixin, models.TextChoices):
|
|||
'default': False,
|
||||
'label': _('Old SSH version'),
|
||||
'help_text': _('Old SSH version like openssh 5.x or 6.x')
|
||||
},
|
||||
'nc': {
|
||||
'type': 'bool',
|
||||
'default': False,
|
||||
'label': 'Netcat (nc)',
|
||||
'help_text': _('Netcat help text')
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
# Generated by Django 4.1.13 on 2024-09-13 08:22
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('assets', '0005_myasset'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='database',
|
||||
name='pg_ssl_mode',
|
||||
field=models.CharField(choices=[
|
||||
('prefer', 'Prefer'),
|
||||
('require', 'Require'),
|
||||
('verify-ca', 'Verify CA'),
|
||||
('verify-full', 'Verify Full')
|
||||
], default='prefer',
|
||||
max_length=16, verbose_name='Postgresql SSL mode'),
|
||||
),
|
||||
]
|
|
@ -38,6 +38,13 @@ class AssetQuerySet(models.QuerySet):
    def valid(self):
        return self.active()

    def gateways(self, is_gateway=1):
        kwargs = {'platform__name__startswith': 'Gateway'}
        if is_gateway:
            return self.filter(**kwargs)
        else:
            return self.exclude(**kwargs)

    def has_protocol(self, name):
        return self.filter(protocols__contains=name)

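A hedged sketch of the new queryset helper in use; the rest of this commit calls it the same way (for example assets.gateways(0) and self.assets.gateways(0).count()):

    gateways = Asset.objects.gateways()       # platform name starts with 'Gateway'
    non_gateways = Asset.objects.gateways(0)  # the complement: ordinary, non-gateway assets
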
@ -158,10 +165,16 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
|||
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
address = models.CharField(max_length=767, verbose_name=_('Address'), db_index=True)
|
||||
platform = models.ForeignKey(Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets')
|
||||
domain = models.ForeignKey("assets.Domain", null=True, blank=True, related_name='assets',
|
||||
verbose_name=_("Zone"), on_delete=models.SET_NULL)
|
||||
nodes = models.ManyToManyField('assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes"))
|
||||
platform = models.ForeignKey(
|
||||
Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets'
|
||||
)
|
||||
domain = models.ForeignKey(
|
||||
"assets.Domain", null=True, blank=True, related_name='assets',
|
||||
verbose_name=_("Zone"), on_delete=models.SET_NULL
|
||||
)
|
||||
nodes = models.ManyToManyField(
|
||||
'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
|
||||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
|
||||
gathered_info = models.JSONField(verbose_name=_('Gathered info'), default=dict, blank=True) # 资产的一些信息,如 硬件信息
|
||||
custom_info = models.JSONField(verbose_name=_('Custom info'), default=dict)
|
||||
|
|
|
@ -1,6 +1,7 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from assets.const import PostgresqlSSLMode
from common.db.fields import EncryptTextField
from .common import Asset

@ -12,6 +13,10 @@ class Database(Asset):
    client_cert = EncryptTextField(verbose_name=_("Client cert"), blank=True)
    client_key = EncryptTextField(verbose_name=_("Client key"), blank=True)
    allow_invalid_cert = models.BooleanField(default=False, verbose_name=_('Allow invalid cert'))
    pg_ssl_mode = models.CharField(
        max_length=16, choices=PostgresqlSSLMode.choices,
        default=PostgresqlSSLMode.PREFER, verbose_name=_('Postgresql SSL mode')
    )

    def __str__(self):
        return '{}({}://{}/{})'.format(self.name, self.type, self.address, self.db_name)

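A hedged sketch of the new pg_ssl_mode field on Database; the field values below are illustrative, and use_ssl/db_name are assumed from the serializer fields shown later in this commit:

    db = Database(
        name="pg-prod", address="10.0.0.5", db_name="app",     # illustrative values
        use_ssl=True, pg_ssl_mode=PostgresqlSSLMode.VERIFY_CA,  # defaults to PREFER
    )
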
@ -31,7 +31,7 @@ class Domain(LabeledMixin, JMSOrgBaseModel):
|
|||
|
||||
@lazyproperty
|
||||
def assets_amount(self):
|
||||
return self.assets.exclude(platform__name='Gateway').count()
|
||||
return self.assets.gateways(0).count()
|
||||
|
||||
def random_gateway(self):
|
||||
gateways = [gw for gw in self.active_gateways if gw.is_connective]
|
||||
|
|
|
@ -16,7 +16,7 @@ __all__ = ['Gateway']
|
|||
class GatewayManager(OrgManager):
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
queryset = queryset.filter(platform__name=GATEWAY_NAME)
|
||||
queryset = queryset.filter(platform__name__startswith=GATEWAY_NAME)
|
||||
return queryset
|
||||
|
||||
def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
|
||||
|
@ -33,10 +33,6 @@ class Gateway(Host):
|
|||
proxy = True
|
||||
verbose_name = _("Gateway")
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.platform = self.default_platform()
|
||||
return super().save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def default_platform(cls):
|
||||
return Platform.objects.get(name=GATEWAY_NAME, internal=True)
|
||||
|
|
|
@ -31,6 +31,12 @@ __all__ = [
|
|||
class AssetProtocolsSerializer(serializers.ModelSerializer):
|
||||
port = serializers.IntegerField(required=False, allow_null=True, max_value=65535, min_value=0)
|
||||
|
||||
def get_render_help_text(self):
|
||||
if self.parent and self.parent.many:
|
||||
return _('Protocols, format is ["protocol/port"]')
|
||||
else:
|
||||
return _('Protocol, format is name/port')
|
||||
|
||||
def to_file_representation(self, data):
|
||||
return '{name}/{port}'.format(**data)
|
||||
|
||||
|
@ -97,6 +103,9 @@ class AssetAccountSerializer(AccountSerializer):
|
|||
attrs = super().validate(attrs)
|
||||
return self.set_secret(attrs)
|
||||
|
||||
def get_render_help_text(self):
|
||||
return _('Accounts, format [{"name": "x", "username": "x", "secret": "x", "secret_type": "password"}]')
|
||||
|
||||
class Meta(AccountSerializer.Meta):
|
||||
fields = [
|
||||
f for f in AccountSerializer.Meta.fields
|
||||
|
@ -121,19 +130,30 @@ class AccountSecretSerializer(SecretReadableMixin, CommonModelSerializer):
        }


class NodeDisplaySerializer(serializers.ListField):
    def get_render_help_text(self):
        return _('Node path, format ["/org_name/node_name"], if node not exist, will create it')

    def to_internal_value(self, data):
        return data

    def to_representation(self, data):
        return data


class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, WritableNestedModelSerializer):
    category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
    type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))
    protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
    accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
    nodes_display = serializers.ListField(read_only=False, required=False, label=_("Node path"))
    nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
    _accounts = None

    class Meta:
        model = Asset
        fields_mini = ['id', 'name', 'address']
        fields_small = fields_mini + ['is_active', 'comment']
        fields_fk = ['domain', 'platform']
        fields_mini = ['id', 'name', 'address'] + fields_fk
        fields_small = fields_mini + ['is_active', 'comment']
        fields_m2m = [
            'nodes', 'labels', 'protocols',
            'nodes_display', 'accounts',

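A hedged sketch of the payload shape the updated AssetSerializer accepts; the values are illustrative, and NodeDisplaySerializer passes the nodes_display list through unchanged:

    payload = {
        "name": "web-server-01",
        "address": "10.0.0.7",
        "nodes_display": ["/Default/Production/Web"],   # node path; created if it does not exist
        "protocols": [{"name": "ssh", "port": 22}],
    }
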
@ -16,6 +16,7 @@ class CustomSerializer(AssetSerializer):
|
|||
class Meta(AssetSerializer.Meta):
|
||||
model = Custom
|
||||
fields = AssetSerializer.Meta.fields + ['custom_info']
|
||||
fields_unimport_template = ['custom_info']
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
|
|
@ -16,9 +16,14 @@ class DatabaseSerializer(AssetSerializer):
|
|||
model = Database
|
||||
extra_fields = [
|
||||
'db_name', 'use_ssl', 'ca_cert', 'client_cert',
|
||||
'client_key', 'allow_invalid_cert'
|
||||
'client_key', 'allow_invalid_cert', 'pg_ssl_mode'
|
||||
]
|
||||
fields = AssetSerializer.Meta.fields + extra_fields
|
||||
extra_kwargs = {
|
||||
'ca_cert': {'help_text': _('CA cert help text')},
|
||||
'pg_ssl_mode': {'help_text': _('Postgresql ssl model help text')},
|
||||
}
|
||||
extra_kwargs.update(AssetSerializer.Meta.extra_kwargs)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
|
|
@ -68,7 +68,7 @@ class DomainListSerializer(DomainSerializer):
|
|||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
queryset = queryset.annotate(
|
||||
assets_amount=Count('assets', filter=~Q(assets__platform__name='Gateway'), distinct=True),
|
||||
assets_amount=Count('assets', filter=~Q(assets__platform__name__startswith='Gateway'), distinct=True),
|
||||
)
|
||||
return queryset
|
||||
|
||||
|
|
|
@ -14,6 +14,11 @@ class GatewaySerializer(HostSerializer):
|
|||
class Meta(HostSerializer.Meta):
|
||||
model = Gateway
|
||||
|
||||
def validate_platform(self, p):
|
||||
if not p.name.startswith('Gateway'):
|
||||
raise serializers.ValidationError(_('The platform must start with Gateway'))
|
||||
return p
|
||||
|
||||
def validate_name(self, value):
|
||||
queryset = Asset.objects.filter(name=value)
|
||||
if self.instance:
|
||||
|
|
|
@ -147,6 +147,10 @@ class PlatformProtocolSerializer(serializers.ModelSerializer):
|
|||
name, port = data.split('/')
|
||||
return {'name': name, 'port': port}
|
||||
|
||||
@staticmethod
|
||||
def get_render_help_text():
|
||||
return _('Protocols, format is ["protocol/port"]')
|
||||
|
||||
|
||||
class PlatformCustomField(serializers.Serializer):
|
||||
TYPE_CHOICES = [(t, t) for t, c in type_field_map.items()]
|
||||
|
|
|
@ -21,8 +21,10 @@ def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):
|
|||
|
||||
|
||||
@shared_task(
|
||||
queue='ansible', verbose_name=_('Asset execute automation'),
|
||||
activity_callback=task_activity_callback
|
||||
queue='ansible',
|
||||
verbose_name=_('Asset execute automation'),
|
||||
activity_callback=task_activity_callback,
|
||||
description=_("Unused")
|
||||
)
|
||||
def execute_asset_automation_task(pid, trigger, tp):
|
||||
model = AutomationTypes.get_type_model(tp)
|
||||
|
|
|
@ -18,8 +18,13 @@ __all__ = [
|
|||
|
||||
|
||||
@shared_task(
|
||||
queue="ansible", verbose_name=_('Gather assets facts'),
|
||||
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id)
|
||||
queue="ansible",
|
||||
verbose_name=_('Gather assets facts'),
|
||||
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id),
|
||||
description=_(
|
||||
"""When clicking 'Refresh hardware info' in 'Console - Asset Details - Basic' this task
|
||||
will be executed"""
|
||||
)
|
||||
)
|
||||
def gather_assets_facts_task(asset_ids, org_id, task_name=None):
|
||||
from assets.models import GatherFactsAutomation
|
||||
|
|
|
@ -1,19 +1,25 @@
from celery import shared_task
from django.utils.translation import gettext_lazy as _

from assets.utils import check_node_assets_amount
from common.const.crontab import CRONTAB_AT_AM_TWO
from common.utils import get_logger
from common.utils.lock import AcquireFailed
from ops.celery.decorator import register_as_period_task
from orgs.models import Organization
from orgs.utils import tmp_to_org
from ops.celery.decorator import register_as_period_task
from assets.utils import check_node_assets_amount

from common.utils.lock import AcquireFailed
from common.utils import get_logger
from common.const.crontab import CRONTAB_AT_AM_TWO

logger = get_logger(__file__)


@shared_task(verbose_name=_('Check the amount of assets under the node'))
@shared_task(
    verbose_name=_('Check the amount of assets under the node'),
    description=_(
        """Manually verifying asset quantities updates the asset count for nodes under the
        current organization. This task will be called in the following two cases: when updating
        nodes and when the number of nodes exceeds 100"""
    )
)
def check_node_assets_amount_task(org_id=None):
    if org_id is None:
        orgs = Organization.objects.all()

@ -30,7 +36,13 @@ def check_node_assets_amount_task(org_id=None):
        logger.error(error)


@shared_task(verbose_name=_('Periodic check the amount of assets under the node'))
@shared_task(
    verbose_name=_('Periodic check the amount of assets under the node'),
    description=_(
        """Schedule the check_node_assets_amount_task to periodically update the asset count of
        all nodes under all organizations"""
    )
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
def check_node_assets_amount_period_task():
    check_node_assets_amount_task()

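A hedged sketch of how the manual check might be queued for a single organization; the org id is illustrative. The periodic variant covers all organizations at 02:00 via CRONTAB_AT_AM_TWO:

    check_node_assets_amount_task.delay(org_id="11111111-2222-3333-4444-555555555555")
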
@ -17,8 +17,12 @@ __all__ = [
|
|||
|
||||
|
||||
@shared_task(
|
||||
verbose_name=_('Test assets connectivity'), queue='ansible',
|
||||
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id)
|
||||
verbose_name=_('Test assets connectivity'),
|
||||
queue='ansible',
|
||||
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id),
|
||||
description=_(
|
||||
"When clicking 'Test Asset Connectivity' in 'Asset Details - Basic Settings' this task will be executed"
|
||||
)
|
||||
)
|
||||
def test_assets_connectivity_task(asset_ids, org_id, task_name=None):
|
||||
from assets.models import PingAutomation
|
||||
|
|
|
@ -16,8 +16,12 @@ __all__ = [
|
|||
|
||||
|
||||
@shared_task(
|
||||
verbose_name=_('Test gateways connectivity'), queue='ansible',
|
||||
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id)
|
||||
verbose_name=_('Test gateways connectivity'),
|
||||
queue='ansible',
|
||||
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id),
|
||||
description=_(
|
||||
"When clicking 'Test Connection' in 'Domain Details - Gateway' this task will be executed"
|
||||
)
|
||||
)
|
||||
def test_gateways_connectivity_task(asset_ids, org_id, local_port, task_name=None):
|
||||
from assets.models import PingAutomation
|
||||
|
@ -33,4 +37,5 @@ def test_gateways_connectivity_task(asset_ids, org_id, local_port, task_name=Non
|
|||
def test_gateways_connectivity_manual(gateway_ids, local_port):
|
||||
task_name = gettext_noop("Test gateways connectivity")
|
||||
gateway_ids = [str(i) for i in gateway_ids]
|
||||
return test_gateways_connectivity_task.delay(gateway_ids, str(current_org.id), local_port, task_name)
|
||||
return test_gateways_connectivity_task.delay(gateway_ids, str(current_org.id), local_port,
|
||||
task_name)
|
||||
|
|
|
@ -1,2 +1 @@
|
|||
from .k8s import *
|
||||
from .node import *
|
||||
|
|
|
@ -1,177 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from urllib.parse import urlencode, urlparse
|
||||
|
||||
from kubernetes import client
|
||||
from kubernetes.client import api_client
|
||||
from kubernetes.client.api import core_v1_api
|
||||
from sshtunnel import SSHTunnelForwarder, BaseSSHTunnelForwarderError
|
||||
|
||||
from common.utils import get_logger
|
||||
from ..const import CloudTypes, Category
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class KubernetesClient:
|
||||
def __init__(self, asset, token):
|
||||
self.url = asset.address
|
||||
self.token = token or ''
|
||||
self.server = self.get_gateway_server(asset)
|
||||
|
||||
@property
|
||||
def api(self):
|
||||
configuration = client.Configuration()
|
||||
scheme = urlparse(self.url).scheme
|
||||
if not self.server:
|
||||
host = self.url
|
||||
else:
|
||||
host = f'{scheme}://127.0.0.1:{self.server.local_bind_port}'
|
||||
configuration.host = host
|
||||
configuration.verify_ssl = False
|
||||
configuration.api_key = {"authorization": "Bearer " + self.token}
|
||||
c = api_client.ApiClient(configuration=configuration)
|
||||
api = core_v1_api.CoreV1Api(c)
|
||||
return api
|
||||
|
||||
def get_namespaces(self):
|
||||
namespaces = []
|
||||
resp = self.api.list_namespace()
|
||||
for ns in resp.items:
|
||||
namespaces.append(ns.metadata.name)
|
||||
return namespaces
|
||||
|
||||
def get_pods(self, namespace):
|
||||
pods = []
|
||||
resp = self.api.list_namespaced_pod(namespace)
|
||||
for pd in resp.items:
|
||||
pods.append(pd.metadata.name)
|
||||
return pods
|
||||
|
||||
def get_containers(self, namespace, pod_name):
|
||||
containers = []
|
||||
resp = self.api.read_namespaced_pod(pod_name, namespace)
|
||||
for container in resp.spec.containers:
|
||||
containers.append(container.name)
|
||||
return containers
|
||||
|
||||
@staticmethod
|
||||
def get_gateway_server(asset):
|
||||
gateway = None
|
||||
if not asset.is_gateway and asset.domain:
|
||||
gateway = asset.domain.select_gateway()
|
||||
|
||||
if not gateway:
|
||||
return
|
||||
|
||||
remote_bind_address = (
|
||||
urlparse(asset.address).hostname,
|
||||
urlparse(asset.address).port or 443
|
||||
)
|
||||
server = SSHTunnelForwarder(
|
||||
(gateway.address, gateway.port),
|
||||
ssh_username=gateway.username,
|
||||
ssh_password=gateway.password,
|
||||
ssh_pkey=gateway.private_key_path,
|
||||
remote_bind_address=remote_bind_address
|
||||
)
|
||||
try:
|
||||
server.start()
|
||||
except BaseSSHTunnelForwarderError:
|
||||
err_msg = 'Gateway is not active: %s' % asset.get('name', '')
|
||||
print('\033[31m %s \033[0m\n' % err_msg)
|
||||
return server
|
||||
|
||||
def run(self, tp, *args):
|
||||
func_name = f'get_{tp}s'
|
||||
data = []
|
||||
if hasattr(self, func_name):
|
||||
try:
|
||||
data = getattr(self, func_name)(*args)
|
||||
except Exception as e:
|
||||
logger.error(f'K8S tree get {tp} error: {e}')
|
||||
|
||||
if self.server:
|
||||
self.server.stop()
|
||||
return data
|
||||
|
||||
|
||||
class KubernetesTree:
|
||||
def __init__(self, asset, secret):
|
||||
self.asset = asset
|
||||
self.secret = secret
|
||||
|
||||
def as_asset_tree_node(self):
|
||||
i = str(self.asset.id)
|
||||
name = str(self.asset)
|
||||
node = self.create_tree_node(
|
||||
i, i, name, 'asset', icon='k8s', is_open=True,
|
||||
)
|
||||
return node
|
||||
|
||||
def as_namespace_node(self, name, tp):
|
||||
i = urlencode({'namespace': name})
|
||||
pid = str(self.asset.id)
|
||||
node = self.create_tree_node(i, pid, name, tp, icon='cloud')
|
||||
return node
|
||||
|
||||
def as_pod_tree_node(self, namespace, name, tp):
|
||||
pid = urlencode({'namespace': namespace})
|
||||
i = urlencode({'namespace': namespace, 'pod': name})
|
||||
node = self.create_tree_node(i, pid, name, tp, icon='cloud')
|
||||
return node
|
||||
|
||||
def as_container_tree_node(self, namespace, pod, name, tp):
|
||||
pid = urlencode({'namespace': namespace, 'pod': pod})
|
||||
i = urlencode({'namespace': namespace, 'pod': pod, 'container': name})
|
||||
node = self.create_tree_node(
|
||||
i, pid, name, tp, icon='cloud', is_container=True
|
||||
)
|
||||
return node
|
||||
|
||||
@staticmethod
|
||||
def create_tree_node(id_, pid, name, identity, icon='', is_container=False, is_open=False):
|
||||
node = {
|
||||
'id': id_,
|
||||
'name': name,
|
||||
'title': name,
|
||||
'pId': pid,
|
||||
'isParent': not is_container,
|
||||
'open': is_open,
|
||||
'iconSkin': icon,
|
||||
'meta': {
|
||||
'type': 'k8s',
|
||||
'data': {
|
||||
'category': Category.CLOUD,
|
||||
'type': CloudTypes.K8S,
|
||||
'identity': identity
|
||||
}
|
||||
}
|
||||
}
|
||||
return node
|
||||
|
||||
def async_tree_node(self, namespace, pod):
|
||||
tree = []
|
||||
k8s_client = KubernetesClient(self.asset, self.secret)
|
||||
if pod:
|
||||
tp = 'container'
|
||||
containers = k8s_client.run(
|
||||
tp, namespace, pod
|
||||
)
|
||||
for container in containers:
|
||||
container_node = self.as_container_tree_node(
|
||||
namespace, pod, container, tp
|
||||
)
|
||||
tree.append(container_node)
|
||||
elif namespace:
|
||||
tp = 'pod'
|
||||
pods = k8s_client.run(tp, namespace)
|
||||
for pod in pods:
|
||||
pod_node = self.as_pod_tree_node(namespace, pod, tp)
|
||||
tree.append(pod_node)
|
||||
else:
|
||||
tp = 'namespace'
|
||||
namespaces = k8s_client.run(tp)
|
||||
for namespace in namespaces:
|
||||
namespace_node = self.as_namespace_node(namespace, tp)
|
||||
tree.append(namespace_node)
|
||||
return tree
|
|
@ -128,7 +128,15 @@ def clean_expired_session_period():
|
|||
logger.info("Clean session replay done")
|
||||
|
||||
|
||||
@shared_task(verbose_name=_('Clean audits session task log'))
|
||||
@shared_task(
|
||||
verbose_name=_('Clean audits session task log'),
|
||||
description=_(
|
||||
"""Since the system generates login logs, operation logs, file upload logs, activity
|
||||
logs, Celery execution logs, session recordings, command records, and password change
|
||||
logs, it will perform cleanup of records that exceed the time limit according to the
|
||||
'Tasks - Regular clean-up' in the system settings at 2 a.m daily"""
|
||||
)
|
||||
)
|
||||
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
|
||||
def clean_audits_log_period():
|
||||
print("Start clean audit session task log")
|
||||
|
@ -142,7 +150,13 @@ def clean_audits_log_period():
|
|||
clean_password_change_log_period()
|
||||
|
||||
|
||||
@shared_task(verbose_name=_('Upload FTP file to external storage'))
|
||||
@shared_task(
|
||||
verbose_name=_('Upload FTP file to external storage'),
|
||||
description=_(
|
||||
"""If SERVER_REPLAY_STORAGE is configured, files uploaded through file management will be
|
||||
synchronized to external storage"""
|
||||
)
|
||||
)
|
||||
def upload_ftp_file_to_external_storage(ftp_log_id, file_name):
|
||||
logger.info(f'Start upload FTP file record to external storage: {ftp_log_id} - {file_name}')
|
||||
ftp_log = FTPLog.objects.filter(id=ftp_log_id).first()
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# coding:utf-8
|
||||
#
|
||||
|
||||
import abc
|
||||
import ldap
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
|
||||
|
@ -15,13 +15,16 @@ from .base import JMSBaseAuthBackend
|
|||
logger = _LDAPConfig.get_logger()
|
||||
|
||||
|
||||
class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBackend):
|
||||
"""
|
||||
Override this class to override _LDAPUser to LDAPUser
|
||||
"""
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return settings.AUTH_LDAP
|
||||
class LDAPBaseBackend(LDAPBackend):
|
||||
|
||||
@abc.abstractmethod
|
||||
def is_enabled(self):
|
||||
raise NotImplementedError('is_enabled')
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def is_user_login_only_in_users(self):
|
||||
raise NotImplementedError('is_authenticated')
|
||||
|
||||
def get_or_build_user(self, username, ldap_user):
|
||||
"""
|
||||
|
@ -56,38 +59,6 @@ class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBackend):
|
|||
|
||||
return user, built
|
||||
|
||||
def pre_check(self, username, password):
|
||||
if not settings.AUTH_LDAP:
|
||||
error = 'Not enabled auth ldap'
|
||||
return False, error
|
||||
if not username:
|
||||
error = 'Username is None'
|
||||
return False, error
|
||||
if not password:
|
||||
error = 'Password is None'
|
||||
return False, error
|
||||
if settings.AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS:
|
||||
user_model = self.get_user_model()
|
||||
exist = user_model.objects.filter(username=username).exists()
|
||||
if not exist:
|
||||
error = 'user ({}) is not in the user list'.format(username)
|
||||
return False, error
|
||||
return True, ''
|
||||
|
||||
def authenticate(self, request=None, username=None, password=None, **kwargs):
|
||||
logger.info('Authentication LDAP backend')
|
||||
if username is None or password is None:
|
||||
logger.info('No username or password')
|
||||
return None
|
||||
match, msg = self.pre_check(username, password)
|
||||
if not match:
|
||||
logger.info('Authenticate failed: {}'.format(msg))
|
||||
return None
|
||||
ldap_user = LDAPUser(self, username=username.strip(), request=request)
|
||||
user = self.authenticate_ldap_user(ldap_user, password)
|
||||
logger.info('Authenticate user: {}'.format(user))
|
||||
return user if self.user_can_authenticate(user) else None
|
||||
|
||||
def get_user(self, user_id):
|
||||
user = None
|
||||
try:
|
||||
|
@ -111,6 +82,67 @@ class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBackend):
|
|||
user = ldap_user.populate_user()
|
||||
return user
|
||||
|
||||
def authenticate(self, request=None, username=None, password=None, **kwargs):
|
||||
logger.info('Authentication LDAP backend')
|
||||
if username is None or password is None:
|
||||
logger.info('No username or password')
|
||||
return None
|
||||
match, msg = self.pre_check(username, password)
|
||||
if not match:
|
||||
logger.info('Authenticate failed: {}'.format(msg))
|
||||
return None
|
||||
ldap_user = LDAPUser(self, username=username.strip(), request=request)
|
||||
user = self.authenticate_ldap_user(ldap_user, password)
|
||||
logger.info('Authenticate user: {}'.format(user))
|
||||
return user if self.user_can_authenticate(user) else None
|
||||
|
||||
def pre_check(self, username, password):
|
||||
if not self.is_enabled():
|
||||
error = 'Not enabled auth ldap'
|
||||
return False, error
|
||||
if not username:
|
||||
error = 'Username is None'
|
||||
return False, error
|
||||
if not password:
|
||||
error = 'Password is None'
|
||||
return False, error
|
||||
if self.is_user_login_only_in_users:
|
||||
user_model = self.get_user_model()
|
||||
exist = user_model.objects.filter(username=username).exists()
|
||||
if not exist:
|
||||
error = 'user ({}) is not in the user list'.format(username)
|
||||
return False, error
|
||||
return True, ''
|
||||
|
||||
|
||||
class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBaseBackend):
|
||||
"""
|
||||
Override this class to override _LDAPUser to LDAPUser
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return settings.AUTH_LDAP
|
||||
|
||||
@property
|
||||
def is_user_login_only_in_users(self):
|
||||
return settings.AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS
|
||||
|
||||
|
||||
class LDAPHAAuthorizationBackend(JMSBaseAuthBackend, LDAPBaseBackend):
|
||||
"""
|
||||
Override this class to override _LDAPUser to LDAPUser
|
||||
"""
|
||||
settings_prefix = "AUTH_LDAP_HA_"
|
||||
|
||||
@staticmethod
|
||||
def is_enabled():
|
||||
return settings.AUTH_LDAP_HA
|
||||
|
||||
@property
|
||||
def is_user_login_only_in_users(self):
|
||||
return settings.AUTH_LDAP_HA_USER_LOGIN_ONLY_IN_USERS
|
||||
|
||||
|
||||
class LDAPUser(_LDAPUser):
|
||||
|
||||
|
@ -126,13 +158,18 @@ class LDAPUser(_LDAPUser):
|
|||
configuration in the settings.py file
|
||||
is configured with a `lambda` problem value
|
||||
"""
|
||||
|
||||
if isinstance(self.backend, LDAPAuthorizationBackend):
|
||||
search_filter = settings.AUTH_LDAP_SEARCH_FILTER
|
||||
search_ou = settings.AUTH_LDAP_SEARCH_OU
|
||||
else:
|
||||
search_filter = settings.AUTH_LDAP_HA_SEARCH_FILTER
|
||||
search_ou = settings.AUTH_LDAP_HA_SEARCH_OU
|
||||
user_search_union = [
|
||||
LDAPSearch(
|
||||
USER_SEARCH, ldap.SCOPE_SUBTREE,
|
||||
settings.AUTH_LDAP_SEARCH_FILTER
|
||||
search_filter
|
||||
)
|
||||
for USER_SEARCH in str(settings.AUTH_LDAP_SEARCH_OU).split("|")
|
||||
for USER_SEARCH in str(search_ou).split("|")
|
||||
]
|
||||
|
||||
search = LDAPSearchUnion(*user_search_union)
|
||||
|
@ -169,7 +206,8 @@ class LDAPUser(_LDAPUser):
|
|||
else:
|
||||
value = is_true(value)
|
||||
except LookupError:
|
||||
logger.warning("{} does not have a value for the attribute {}".format(self.dn, attr))
|
||||
logger.warning(
|
||||
"{} does not have a value for the attribute {}".format(self.dn, attr))
|
||||
else:
|
||||
if not hasattr(self._user, field):
|
||||
continue
|
||||
|
|
|
@ -8,27 +8,26 @@
|
|||
"""
|
||||
|
||||
import base64
|
||||
import requests
|
||||
|
||||
from rest_framework.exceptions import ParseError
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.backends import ModelBackend
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
from django.db import transaction
|
||||
from django.urls import reverse
|
||||
from django.conf import settings
|
||||
from rest_framework.exceptions import ParseError
|
||||
|
||||
from common.utils import get_logger
|
||||
from authentication.signals import user_auth_success, user_auth_failed
|
||||
from authentication.utils import build_absolute_uri_for_oidc
|
||||
from common.utils import get_logger
|
||||
from users.utils import construct_user_email
|
||||
|
||||
from ..base import JMSBaseAuthBackend
|
||||
from .utils import validate_and_return_id_token
|
||||
from .decorator import ssl_verification
|
||||
from .signals import (
|
||||
openid_create_or_update_user
|
||||
)
|
||||
from authentication.signals import user_auth_success, user_auth_failed
|
||||
from .utils import validate_and_return_id_token
|
||||
from ..base import JMSBaseAuthBackend
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
@ -55,16 +54,17 @@ class UserMixin:
|
|||
logger.debug(log_prompt.format(user_attrs))
|
||||
|
||||
username = user_attrs.get('username')
|
||||
name = user_attrs.get('name')
|
||||
groups = user_attrs.pop('groups', None)
|
||||
|
||||
user, created = get_user_model().objects.get_or_create(
|
||||
username=username, defaults=user_attrs
|
||||
)
|
||||
user_attrs['groups'] = groups
|
||||
logger.debug(log_prompt.format("user: {}|created: {}".format(user, created)))
|
||||
logger.debug(log_prompt.format("Send signal => openid create or update user"))
|
||||
openid_create_or_update_user.send(
|
||||
sender=self.__class__, request=request, user=user, created=created,
|
||||
name=name, username=username, email=email
|
||||
sender=self.__class__, request=request, user=user,
|
||||
created=created, attrs=user_attrs,
|
||||
)
|
||||
return user, created
|
||||
|
||||
|
@ -269,7 +269,8 @@ class OIDCAuthPasswordBackend(OIDCBaseBackend):
|
|||
|
||||
# Calls the token endpoint.
|
||||
logger.debug(log_prompt.format('Call the token endpoint'))
|
||||
token_response = requests.post(settings.AUTH_OPENID_PROVIDER_TOKEN_ENDPOINT, data=token_payload, timeout=request_timeout)
|
||||
token_response = requests.post(settings.AUTH_OPENID_PROVIDER_TOKEN_ENDPOINT, data=token_payload,
|
||||
timeout=request_timeout)
|
||||
try:
|
||||
token_response.raise_for_status()
|
||||
token_response_data = token_response.json()
|
||||
|
|
|
@ -17,13 +17,16 @@ import time
|
|||
from django.conf import settings
|
||||
from django.contrib import auth
|
||||
from django.core.exceptions import SuspiciousOperation
|
||||
from django.db import IntegrityError
|
||||
from django.http import HttpResponseRedirect, QueryDict
|
||||
from django.urls import reverse
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.utils.http import urlencode
|
||||
from django.views.generic import View
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from authentication.utils import build_absolute_uri_for_oidc
|
||||
from authentication.views.mixins import FlashMessageMixin
|
||||
from common.utils import safe_next_url
|
||||
from .utils import get_logger
|
||||
|
||||
|
@ -113,7 +116,7 @@ class OIDCAuthRequestView(View):
|
|||
return HttpResponseRedirect(redirect_url)
|
||||
|
||||
|
||||
class OIDCAuthCallbackView(View):
|
||||
class OIDCAuthCallbackView(View, FlashMessageMixin):
|
||||
""" Allows to complete the authentication process.
|
||||
|
||||
This view acts as the main endpoint to complete the authentication process involving the OIDC
|
||||
|
@ -165,7 +168,13 @@ class OIDCAuthCallbackView(View):
|
|||
next_url = request.session.get('oidc_auth_next_url', None)
|
||||
code_verifier = request.session.get('oidc_auth_code_verifier', None)
|
||||
logger.debug(log_prompt.format('Process authenticate'))
|
||||
user = auth.authenticate(nonce=nonce, request=request, code_verifier=code_verifier)
|
||||
try:
|
||||
user = auth.authenticate(nonce=nonce, request=request, code_verifier=code_verifier)
|
||||
except IntegrityError:
|
||||
title = _("OpenID Error")
|
||||
msg = _('Please check if a user with the same username or email already exists')
|
||||
response = self.get_failed_response('/', title, msg)
|
||||
return response
|
||||
if user:
|
||||
logger.debug(log_prompt.format('Login: {}'.format(user)))
|
||||
auth.login(self.request, user)
|
||||
|
|
|
@ -27,9 +27,13 @@ class SAML2Backend(JMSModelBackend):
|
|||
log_prompt = "Get or Create user [SAML2Backend]: {}"
|
||||
logger.debug(log_prompt.format('start'))
|
||||
|
||||
groups = saml_user_data.pop('groups', None)
|
||||
|
||||
user, created = get_user_model().objects.get_or_create(
|
||||
username=saml_user_data['username'], defaults=saml_user_data
|
||||
)
|
||||
|
||||
saml_user_data['groups'] = groups
|
||||
logger.debug(log_prompt.format("user: {}|created: {}".format(user, created)))
|
||||
|
||||
logger.debug(log_prompt.format("Send signal => saml2 create or update user"))
|
||||
|
|
|
@ -3,8 +3,10 @@ from urllib import parse
|
|||
|
||||
from django.conf import settings
|
||||
from django.contrib import auth
|
||||
from django.db import IntegrityError
|
||||
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseServerError
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.views import View
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from onelogin.saml2.auth import OneLogin_Saml2_Auth
|
||||
|
@ -14,6 +16,7 @@ from onelogin.saml2.idp_metadata_parser import (
|
|||
dict_deep_merge
|
||||
)
|
||||
|
||||
from authentication.views.mixins import FlashMessageMixin
|
||||
from common.utils import get_logger
|
||||
from .settings import JmsSaml2Settings
|
||||
|
||||
|
@ -87,6 +90,7 @@ class PrepareRequestMixin:
|
|||
('name', 'name', False),
|
||||
('phone', 'phone', False),
|
||||
('comment', 'comment', False),
|
||||
('groups', 'groups', False),
|
||||
)
|
||||
attr_list = []
|
||||
for name, friend_name, is_required in need_attrs:
|
||||
|
@ -185,7 +189,7 @@ class PrepareRequestMixin:
|
|||
user_attrs = {}
|
||||
attr_mapping = settings.SAML2_RENAME_ATTRIBUTES
|
||||
attrs = saml_instance.get_attributes()
|
||||
valid_attrs = ['username', 'name', 'email', 'comment', 'phone']
|
||||
valid_attrs = ['username', 'name', 'email', 'comment', 'phone', 'groups']
|
||||
|
||||
for attr, value in attrs.items():
|
||||
attr = attr.rsplit('/', 1)[-1]
|
||||
|
@ -242,7 +246,7 @@ class Saml2EndSessionView(View, PrepareRequestMixin):
|
|||
return HttpResponseRedirect(logout_url)
|
||||
|
||||
|
||||
class Saml2AuthCallbackView(View, PrepareRequestMixin):
|
||||
class Saml2AuthCallbackView(View, PrepareRequestMixin, FlashMessageMixin):
|
||||
|
||||
def post(self, request):
|
||||
log_prompt = "Process SAML2 POST requests: {}"
|
||||
|
@ -271,7 +275,13 @@ class Saml2AuthCallbackView(View, PrepareRequestMixin):
|
|||
|
||||
logger.debug(log_prompt.format('Process authenticate'))
|
||||
saml_user_data = self.get_attributes(saml_instance)
|
||||
user = auth.authenticate(request=request, saml_user_data=saml_user_data)
|
||||
try:
|
||||
user = auth.authenticate(request=request, saml_user_data=saml_user_data)
|
||||
except IntegrityError:
|
||||
title = _("SAML2 Error")
|
||||
msg = _('Please check if a user with the same username or email already exists')
|
||||
response = self.get_failed_response('/', title, msg)
|
||||
return response
|
||||
if user and user.is_valid:
|
||||
logger.debug(log_prompt.format('Login: {}'.format(user)))
|
||||
auth.login(self.request, user)
|
||||
|
|
|
@ -301,6 +301,7 @@ class MFAMixin:
|
|||
|
||||
|
||||
class AuthPostCheckMixin:
|
||||
|
||||
@classmethod
|
||||
def generate_reset_password_url_with_flash_msg(cls, user, message):
|
||||
reset_passwd_url = reverse('authentication:reset-password')
|
||||
|
@ -319,20 +320,26 @@ class AuthPostCheckMixin:
|
|||
|
||||
@classmethod
|
||||
def _check_passwd_is_too_simple(cls, user: User, password):
|
||||
if password == 'admin' or password == 'ChangeMe':
|
||||
if not user.is_auth_backend_model():
|
||||
return
|
||||
if user.check_passwd_too_simple(password):
|
||||
message = _('Your password is too simple, please change it for security')
|
||||
url = cls.generate_reset_password_url_with_flash_msg(user, message=message)
|
||||
raise errors.PasswordTooSimple(url)
|
||||
|
||||
@classmethod
|
||||
def _check_passwd_need_update(cls, user: User):
|
||||
if user.need_update_password:
|
||||
if not user.is_auth_backend_model():
|
||||
return
|
||||
if user.check_need_update_password():
|
||||
message = _('You should to change your password before login')
|
||||
url = cls.generate_reset_password_url_with_flash_msg(user, message)
|
||||
raise errors.PasswordNeedUpdate(url)
|
||||
|
||||
@classmethod
|
||||
def _check_password_require_reset_or_not(cls, user: User):
|
||||
if not user.is_auth_backend_model():
|
||||
return
|
||||
if user.password_has_expired:
|
||||
message = _('Your password has expired, please reset before logging in')
|
||||
url = cls.generate_reset_password_url_with_flash_msg(user, message)
|
||||
|
|
|
@ -2,13 +2,19 @@
|
|||
#
|
||||
|
||||
from celery import shared_task
|
||||
from ops.celery.decorator import register_as_period_task
|
||||
from django.contrib.sessions.models import Session
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from ops.celery.decorator import register_as_period_task
|
||||
|
||||
@shared_task(verbose_name=_('Clean expired session'))
|
||||
|
||||
@shared_task(
|
||||
verbose_name=_('Clean expired session'),
|
||||
description=_(
|
||||
"Since user logins create sessions, the system will clean up expired sessions every 24 hours"
|
||||
)
|
||||
)
|
||||
@register_as_period_task(interval=3600 * 24)
|
||||
def clean_django_sessions():
|
||||
Session.objects.filter(expire_date__lt=timezone.now()).delete()
|
||||
|
|
|
@ -38,9 +38,15 @@ class SuggestionMixin:
|
|||
class RenderToJsonMixin:
|
||||
@action(methods=[POST, PUT], detail=False, url_path='render-to-json')
|
||||
def render_to_json(self, request: Request, *args, **kwargs):
|
||||
rows = request.data
|
||||
if rows and isinstance(rows[0], dict):
|
||||
first = list(rows[0].values())[0]
|
||||
if first.startswith('#Help'):
|
||||
rows.pop(0)
|
||||
|
||||
data = {
|
||||
'title': (),
|
||||
'data': request.data,
|
||||
'data': rows,
|
||||
}
|
||||
|
||||
jms_context = getattr(request, 'jms_context', {})
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
@ -12,8 +13,11 @@ class CommonConfig(AppConfig):
|
|||
from . import signal_handlers # noqa
|
||||
from . import tasks # noqa
|
||||
from .signals import django_ready
|
||||
|
||||
excludes = ['migrate', 'compilemessages', 'makemigrations']
|
||||
for i in excludes:
|
||||
if i in sys.argv:
|
||||
return
|
||||
django_ready.send(CommonConfig)
|
||||
|
||||
if not os.environ.get('DJANGO_DEBUG_SHELL'):
|
||||
django_ready.send(CommonConfig)
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
|
||||
CRONTAB_AT_AM_TWO = '0 2 * * *'
|
||||
CRONTAB_AT_AM_THREE = '0 3 * * *'
|
||||
CRONTAB_AT_AM_TEN = '0 10 * * *'
|
||||
CRONTAB_AT_PM_TWO = '0 14 * * *'
|
||||
|
||||
|
|
|
@ -108,7 +108,7 @@ class BaseFileParser(BaseParser):
|
|||
if not matched:
|
||||
return v
|
||||
obj_name, obj_id = matched.groups()
|
||||
if len(obj_id) < 36:
|
||||
if obj_id.isdigit():
|
||||
obj_id = int(obj_id)
|
||||
return {'pk': obj_id, 'name': obj_name}
|
||||
|
||||
|
@ -119,8 +119,6 @@ class BaseFileParser(BaseParser):
|
|||
value = field.to_file_internal_value(value)
|
||||
elif isinstance(field, serializers.BooleanField):
|
||||
value = value.lower() in ['true', '1', 'yes']
|
||||
elif isinstance(field, serializers.ChoiceField):
|
||||
value = value
|
||||
elif isinstance(field, ObjectRelatedField):
|
||||
if field.many:
|
||||
value = [self.id_name_to_obj(v) for v in value]
|
||||
|
@ -164,6 +162,15 @@ class BaseFileParser(BaseParser):
            data.append(row_data)
        return data

    @staticmethod
    def pop_help_text_if_need(rows):
        rows = list(rows)
        if not rows:
            return rows
        if rows[0][0].startswith('#Help'):
            rows.pop(0)
        return rows

    def parse(self, stream, media_type=None, parser_context=None):
        assert parser_context is not None, '`parser_context` should not be `None`'

@ -192,6 +199,7 @@ class BaseFileParser(BaseParser):
            request.jms_context = {}
            request.jms_context['column_title_field_pairs'] = column_title_field_pairs

            rows = self.pop_help_text_if_need(rows)
            data = self.generate_data(field_names, rows)
            return data
        except Exception as e:

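A hedged sketch of the new help-row handling; the rows below are illustrative:

    rows = [
        ["#Help: import template hint row", "", ""],     # dropped by pop_help_text_if_need
        ["web-server-01", "10.0.0.7", "Linux"],
    ]
    rows = BaseFileParser.pop_help_text_if_need(rows)    # -> [["web-server-01", "10.0.0.7", "Linux"]]
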
@ -5,12 +5,13 @@ from datetime import datetime

import pyzipper
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from rest_framework.renderers import BaseRenderer
from rest_framework.utils import encoders, json

from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from common.serializers import fields as common_fields
from common.utils import get_logger

logger = get_logger(__file__)

@ -38,24 +39,27 @@ class BaseFileRenderer(BaseRenderer):

            filename_prefix = serializer.Meta.model.__name__.lower()
        else:
            filename_prefix = 'download'
        now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        filename = "{}_{}.{}".format(filename_prefix, now, self.format)
        suffix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        if self.template == 'import':
            suffix = 'template'
        filename = "{}_{}.{}".format(filename_prefix, suffix, self.format)
        disposition = 'attachment; filename="{}"'.format(filename)
        response['Content-Disposition'] = disposition

    def get_rendered_fields(self):
        fields = self.serializer.fields
        meta = getattr(self.serializer, 'Meta', None)
        if self.template == 'import':
            fields = [v for k, v in fields.items() if not v.read_only and k != "org_id" and k != 'id']
            fields_unimport = getattr(meta, 'fields_unimport_template', [])
            fields = [v for v in fields if v.field_name not in fields_unimport]
        elif self.template == 'update':
            fields = [v for k, v in fields.items() if not v.read_only and k != "org_id"]
        else:
            fields = [v for k, v in fields.items() if not v.write_only and k != "org_id"]

        meta = getattr(self.serializer, 'Meta', None)
        if meta:
            fields_unexport = getattr(meta, 'fields_unexport', [])
            fields = [v for v in fields if v.field_name not in fields_unexport]
        fields_unexport = getattr(meta, 'fields_unexport', [])
        fields = [v for v in fields if v.field_name not in fields_unexport]
        return fields

    @staticmethod
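Net effect of the two hunks above: an import-template download is named `<prefix>_template.<ext>` instead of carrying a timestamp, and its field list drops read-only fields, `id`, `org_id`, and anything in `Meta.fields_unimport_template`. A toy sketch of the filename selection only; the prefix and format values are invented:

from datetime import datetime

def build_filename(filename_prefix, template, fmt):
    # Same idea as the hunk above: exports get a timestamp, import templates a fixed suffix.
    suffix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    if template == 'import':
        suffix = 'template'
    return "{}_{}.{}".format(filename_prefix, suffix, fmt)

print(build_filename('asset', 'import', 'xlsx'))   # asset_template.xlsx
print(build_filename('asset', 'export', 'csv'))    # asset_<timestamp>.csv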
@ -105,10 +109,10 @@ class BaseFileRenderer(BaseRenderer):

            value = field.to_file_representation(value)
        elif isinstance(value, bool):
            value = 'Yes' if value else 'No'
        elif isinstance(field, LabeledChoiceField):
        elif isinstance(field, common_fields.LabeledChoiceField):
            value = value or {}
            value = '{}({})'.format(value.get('label'), value.get('value'))
        elif isinstance(field, ObjectRelatedField):
        elif isinstance(field, common_fields.ObjectRelatedField):
            if field.many:
                value = [self.to_id_name(v) for v in value]
            else:

@ -126,6 +130,53 @@ class BaseFileRenderer(BaseRenderer):

            value = json.dumps(value, cls=encoders.JSONEncoder, ensure_ascii=False)
        return str(value)

    def get_field_help_text(self, field):
        text = ''
        if hasattr(field, 'get_render_help_text'):
            text = field.get_render_help_text()
        elif isinstance(field, serializers.BooleanField):
            text = _('Yes/No')
        elif isinstance(field, serializers.CharField):
            if field.max_length:
                text = _('Text, max length {}').format(field.max_length)
            else:
                text = _("Long text, no length limit")
        elif isinstance(field, serializers.IntegerField):
            text = _('Number, min {} max {}').format(field.min_value, field.max_value)
            text = text.replace('min None', '').replace('max None', '')
        elif isinstance(field, serializers.DateTimeField):
            text = _('Datetime format {}').format(timezone.now().strftime(settings.REST_FRAMEWORK['DATETIME_FORMAT']))
        elif isinstance(field, serializers.IPAddressField):
            text = _('IP')
        elif isinstance(field, serializers.ChoiceField):
            choices = [str(v) for v in field.choices.keys()]
            if isinstance(field, common_fields.LabeledChoiceField):
                text = _("Choices, format name(value), name is optional for human read,"
                         " value is requisite, options {}").format(','.join(choices))
            else:
                text = _("Choices, options {}").format(",".join(choices))
        elif isinstance(field, common_fields.PhoneField):
            text = _("Phone number, format +8612345678901")
        elif isinstance(field, common_fields.LabeledChoiceField):
            text = _('Label, format ["key:value"]')
        elif isinstance(field, common_fields.ObjectRelatedField):
            text = _("Object, format name(id), name is optional for human read, id is requisite")
        elif isinstance(field, serializers.PrimaryKeyRelatedField):
            text = _('Object, format id')
        elif isinstance(field, serializers.ManyRelatedField):
            child_relation_class_name = field.child_relation.__class__.__name__
            if child_relation_class_name == "ObjectRelatedField":
                text = _('Objects, format ["name(id)", ...], name is optional for human read, id is requisite')
            elif child_relation_class_name == "LabelRelatedField":
                text = _('Labels, format ["key:value", ...], if label not exists, will create it')
            else:
                text = _('Objects, format ["id", ...]')
        elif isinstance(field, serializers.ListSerializer):
            child = field.child
            if hasattr(child, 'get_render_help_text'):
                text = child.get_render_help_text()
        return text

    def generate_rows(self, data, render_fields):
        for item in data:
            row = []
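`get_field_help_text()` maps each serializer field to a one-line hint for the template's help row. The sketch below reproduces a few of those branches with plain Python stand-ins instead of DRF fields and without the translation machinery, just to show the resulting strings:

# Pure-Python stand-ins for a few field types, to show the shape of the generated hints.
class BooleanField: pass
class IPAddressField: pass
class CharField:
    def __init__(self, max_length=None):
        self.max_length = max_length

def field_help_text(field):
    # Mirrors a few branches of get_field_help_text() above, without Django/DRF.
    if isinstance(field, BooleanField):
        return 'Yes/No'
    if isinstance(field, CharField):
        if field.max_length:
            return 'Text, max length {}'.format(field.max_length)
        return 'Long text, no length limit'
    if isinstance(field, IPAddressField):
        return 'IP'
    return ''

print(field_help_text(CharField(max_length=128)))   # Text, max length 128
print(field_help_text(BooleanField()))              # Yes/No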
@ -135,6 +186,17 @@ class BaseFileRenderer(BaseRenderer):

            row.append(value)
            yield row

    def write_help_text_if_need(self):
        if self.template == 'export':
            return
        fields = self.get_rendered_fields()
        row = []
        for f in fields:
            text = self.get_field_help_text(f)
            row.append(text)
        row[0] = '#Help ' + str(row[0])
        self.write_row(row)

    @abc.abstractmethod
    def initial_writer(self):
        raise NotImplementedError
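Combined with the parser changes earlier, the renderer writes the column titles first and then a single help row whose first cell carries the `#Help` prefix that the import side strips. An invented example of what the top of a generated template looks like:

# Invented illustration of the help row layout written by write_help_text_if_need().
column_titles = ['Name', 'Active', 'Address']
help_texts = ['Text, max length 128', 'Yes/No', 'IP']
help_row = ['#Help ' + help_texts[0]] + help_texts[1:]
for row in (column_titles, help_row):
    print(row)
# ['Name', 'Active', 'Address']
# ['#Help Text, max length 128', 'Yes/No', 'IP']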
@ -184,6 +246,7 @@ class BaseFileRenderer(BaseRenderer):

        rows = self.generate_rows(data, rendered_fields)
        self.initial_writer()
        self.write_column_titles(column_titles)
        self.write_help_text_if_need()
        self.write_rows(rows)
        self.after_render()
        value = self.get_rendered_value()
@ -2,17 +2,17 @@

#

import codecs

import unicodecsv
from six import BytesIO

from .base import BaseFileRenderer
from ..const import CSV_FILE_ESCAPE_CHARS


class CSVFileRenderer(BaseFileRenderer):

class CSVFileRenderer(BaseFileRenderer):
    media_type = 'text/csv'
    format = 'csv'

    writer = None
    buffer = None
@ -409,8 +409,13 @@ class QuerySet(DJQuerySet):

        if not filter_calls:
            return {}
        names, multi_args, multi_kwargs = zip(*filter_calls)
        args = {
            key: value
            for arg in multi_args if arg
            for key, value in arg[0].children
        }
        kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})

        kwargs.update(args)
        striped_kwargs = {}
        for k, v in kwargs.items():
            k = k.replace('__exact', '')
@ -1,15 +1,32 @@

import os

import jms_storage

from django.conf import settings
from django.core.files.storage import default_storage

from terminal.models import default_storage, ReplayStorage
from common.utils import get_logger, make_dirs
from terminal.models import ReplayStorage

logger = get_logger(__name__)


def get_multi_object_storage():
    replay_storages = ReplayStorage.objects.all()
    configs = {}
    for storage in replay_storages:
        if storage.type_sftp:
            continue
        if storage.type_null_or_server:
            continue
        configs[storage.name] = storage.config
    if settings.SERVER_REPLAY_STORAGE:
        configs['SERVER_REPLAY_STORAGE'] = settings.SERVER_REPLAY_STORAGE
    if not configs:
        return None
    storage = jms_storage.get_multi_object_storage(configs)
    return storage


class BaseStorageHandler(object):
    NAME = ''
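The former inline logic in `download()` is now a module-level `get_multi_object_storage()` that merges every usable `ReplayStorage` config with `SERVER_REPLAY_STORAGE` before handing the dict to `jms_storage`. A rough sketch of the config assembly alone, with faked storage rows and settings so it runs without Django or `jms_storage`:

from types import SimpleNamespace

# Fake ReplayStorage rows and a fake setting, just to show the filtering and merge order.
replay_storages = [
    SimpleNamespace(name='s3-main', type_sftp=False, type_null_or_server=False,
                    config={'TYPE': 's3', 'BUCKET': 'replays'}),
    SimpleNamespace(name='sftp-legacy', type_sftp=True, type_null_or_server=False, config={}),
]
SERVER_REPLAY_STORAGE = {'TYPE': 'server'}

configs = {}
for storage in replay_storages:
    if storage.type_sftp or storage.type_null_or_server:
        continue                      # same filtering as the helper above
    configs[storage.name] = storage.config
if SERVER_REPLAY_STORAGE:
    configs['SERVER_REPLAY_STORAGE'] = SERVER_REPLAY_STORAGE

print(configs)   # this dict would be passed to jms_storage.get_multi_object_storage(configs)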
@ -24,20 +41,10 @@ class BaseStorageHandler(object):

        raise NotImplementedError

    def download(self):
        replay_storages = ReplayStorage.objects.all()
        configs = {}
        for storage in replay_storages:
            if storage.type_sftp:
                continue
            if storage.type_null_or_server:
                continue
            configs[storage.name] = storage.config
        if settings.SERVER_REPLAY_STORAGE:
            configs['SERVER_REPLAY_STORAGE'] = settings.SERVER_REPLAY_STORAGE
        if not configs:
        storage = get_multi_object_storage()
        if not storage:
            msg = f"Not found {self.NAME} file, and not remote storage set"
            return None, msg
        storage = jms_storage.get_multi_object_storage(configs)

        remote_path, local_path = self.get_file_path(storage=storage)
        if not remote_path:
@ -1,7 +1,15 @@

import json
import os
import tarfile
from itertools import chain

from terminal.models import default_storage
from .base import BaseStorageHandler
from django.core.files.storage import default_storage

from common.utils import make_dirs, get_logger
from terminal.models import Session
from .base import BaseStorageHandler, get_multi_object_storage

logger = get_logger(__name__)


class ReplayStorageHandler(BaseStorageHandler):

@ -29,3 +37,74 @@ class ReplayStorageHandler(BaseStorageHandler):

            url = default_storage.url(_local_path)
            return _local_path, url
        return None, f'{self.NAME} not found.'


class SessionPartReplayStorageHandler(object):
    Name = 'SessionPartReplayStorageHandler'

    def __init__(self, obj: Session):
        self.obj = obj

    def find_local_part_file_path(self, part_filename):
        local_path = self.obj.get_replay_part_file_local_storage_path(part_filename)
        if default_storage.exists(local_path):
            url = default_storage.url(local_path)
            return local_path, url
        return None, '{} not found.'.format(part_filename)

    def download_part_file(self, part_filename):
        storage = get_multi_object_storage()
        if not storage:
            msg = "Not found {} file, and not remote storage set".format(part_filename)
            return None, msg
        local_path = self.obj.get_replay_part_file_local_storage_path(part_filename)
        remote_path = self.obj.get_replay_part_file_relative_path(part_filename)

        # Path where the downloaded part is saved under the local storage root
        target_path = os.path.join(default_storage.base_location, local_path)

        target_dir = os.path.dirname(target_path)
        if not os.path.isdir(target_dir):
            make_dirs(target_dir, exist_ok=True)

        ok, err = storage.download(remote_path, target_path)
        if not ok:
            msg = 'Failed download {} file: {}'.format(part_filename, err)
            logger.error(msg)
            return None, msg
        url = default_storage.url(local_path)
        return local_path, url

    def get_part_file_path_url(self, part_filename):
        local_path, url = self.find_local_part_file_path(part_filename)
        if local_path is None:
            local_path, url = self.download_part_file(part_filename)
        return local_path, url

    def prepare_offline_tar_file(self):
        replay_meta_filename = '{}.replay.json'.format(self.obj.id)
        meta_local_path, url_or_error = self.get_part_file_path_url(replay_meta_filename)
        if not meta_local_path:
            raise FileNotFoundError(f'{replay_meta_filename} not found: {url_or_error}')
        meta_local_abs_path = os.path.join(default_storage.base_location, meta_local_path)
        with open(meta_local_abs_path, 'r') as f:
            meta_data = json.load(f)
        if not meta_data:
            raise FileNotFoundError(f'{replay_meta_filename} is empty')
        part_filenames = [part_file.get('name') for part_file in meta_data.get('files', [])]
        for part_filename in part_filenames:
            if not part_filename:
                continue
            local_path, url_or_error = self.get_part_file_path_url(part_filename)
            if not local_path:
                raise FileNotFoundError(f'{part_filename} not found: {url_or_error}')
        dir_path = os.path.dirname(meta_local_abs_path)
        offline_filename = '{}.tar'.format(self.obj.id)
        offline_filename_abs_path = os.path.join(dir_path, offline_filename)
        if not os.path.exists(offline_filename_abs_path):
            with tarfile.open(offline_filename_abs_path, 'w') as f:
                f.add(str(meta_local_abs_path), arcname=replay_meta_filename)
                for part_filename in part_filenames:
                    local_abs_path = os.path.join(dir_path, part_filename)
                    f.add(local_abs_path, arcname=part_filename)
        return open(offline_filename_abs_path, 'rb')
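`prepare_offline_tar_file()` reads `<session_id>.replay.json`, fetches any missing parts through `get_multi_object_storage()`, and packs the meta file plus all parts into `<session_id>.tar` for offline playback. A self-contained sketch of just the packing step, using a temporary directory and invented file names in place of the session storage layout:

import json
import os
import tarfile
import tempfile

# Throwaway layout standing in for the session replay storage; names are invented.
work_dir = tempfile.mkdtemp()
meta_name = 'demo-session.replay.json'
part_name = 'demo-session.part.0.cast'
meta_data = {'files': [{'name': part_name}]}

with open(os.path.join(work_dir, part_name), 'w') as f:
    f.write('fake replay part')
with open(os.path.join(work_dir, meta_name), 'w') as f:
    json.dump(meta_data, f)

# Pack the meta file plus every listed part, as prepare_offline_tar_file() does above.
tar_path = os.path.join(work_dir, 'demo-session.tar')
with tarfile.open(tar_path, 'w') as tar:
    tar.add(os.path.join(work_dir, meta_name), arcname=meta_name)
    for part in meta_data.get('files', []):
        tar.add(os.path.join(work_dir, part['name']), arcname=part['name'])

with tarfile.open(tar_path) as tar:
    print(tar.getnames())   # ['demo-session.replay.json', 'demo-session.part.0.cast']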
@ -1,10 +1,10 @@

import os

import jms_storage
from celery import shared_task
from django.conf import settings
from django.core.mail import send_mail, EmailMultiAlternatives, get_connection
from django.utils.translation import gettext_lazy as _
import jms_storage

from .utils import get_logger

@ -28,7 +28,13 @@ def task_activity_callback(self, subject, message, recipient_list, *args, **kwar

    return resource_ids,


@shared_task(verbose_name=_("Send email"), activity_callback=task_activity_callback)
@shared_task(
    verbose_name=_("Send email"),
    activity_callback=task_activity_callback,
    description=_(
        "This task will be executed when sending email notifications"
    )
)
def send_mail_async(*args, **kwargs):
    """ Using celery to send email async

@ -55,7 +61,14 @@ def send_mail_async(*args, **kwargs):

        logger.error("Sending mail error: {}".format(e))


@shared_task(verbose_name=_("Send email attachment"), activity_callback=task_activity_callback)
@shared_task(
    verbose_name=_("Send email attachment"),
    activity_callback=task_activity_callback,
    description=_(
        """When an account password is changed or an account backup generates attachments,
        this task needs to be executed for sending emails and handling attachments"""
    )
)
def send_mail_attachment_async(subject, message, recipient_list, attachment_list=None):
    if attachment_list is None:
        attachment_list = []

@ -77,7 +90,12 @@ def send_mail_attachment_async(subject, message, recipient_list, attachment_list

        logger.error("Sending mail attachment error: {}".format(e))


@shared_task(verbose_name=_('Upload session replay to external storage'))
@shared_task(
    verbose_name=_('Upload account backup to external storage'),
    description=_(
        "When performing an account backup, this task needs to be executed to external storage (SFTP)"
    )
)
def upload_backup_to_obj_storage(recipient, upload_file):
    logger.info(f'Start upload file : {upload_file}')
    remote_path = os.path.join('account_backup', os.path.basename(upload_file))
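The recurring pattern in this file is expanding the one-line `@shared_task(...)` decorators into a multi-line form that adds a translatable `description`, which the task center can then display; Celery keeps unknown decorator kwargs as attributes on the generated task class. A pure-Python mimic of that metadata attachment, where the decorator and task below are placeholders rather than Celery itself:

# Hypothetical decorator mimicking how extra @shared_task kwargs end up as task attributes.
def shared_task_sketch(**options):
    def decorator(fn):
        for key, value in options.items():
            setattr(fn, key, value)
        return fn
    return decorator

@shared_task_sketch(
    verbose_name="Send email",
    description="Executed when sending email notifications",
)
def send_mail_async_sketch(*args, **kwargs):
    return args, kwargs

print(send_mail_async_sketch.description)   # Executed when sending email notifications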
@ -13,6 +13,7 @@ from collections import OrderedDict

from functools import wraps
from itertools import chain

import html2text
import psutil
from django.conf import settings
from django.templatetags.static import static

@ -157,7 +158,7 @@ def is_uuid(seq):

def get_request_ip(request):
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR', '').split(',')
    if x_forwarded_for and x_forwarded_for[0]:
        login_ip = x_forwarded_for[0]
        login_ip = x_forwarded_for[0].split(":")[0]
        return login_ip

    login_ip = request.META.get('REMOTE_ADDR', '')
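The one-line fix in `get_request_ip()` strips a port that some proxies append to the first `X-Forwarded-For` entry (for example `203.0.113.7:52114`), which previously leaked into the recorded login IP. A standalone sketch with an invented header value:

def first_forwarded_ip(meta):
    # Same parsing as the updated get_request_ip(): first XFF entry, with any ":port" dropped.
    x_forwarded_for = meta.get('HTTP_X_FORWARDED_FOR', '').split(',')
    if x_forwarded_for and x_forwarded_for[0]:
        return x_forwarded_for[0].split(":")[0]
    return meta.get('REMOTE_ADDR', '')

print(first_forwarded_ip({'HTTP_X_FORWARDED_FOR': '203.0.113.7:52114, 10.0.0.2'}))  # 203.0.113.7
print(first_forwarded_ip({'REMOTE_ADDR': '192.0.2.10'}))                            # 192.0.2.10

As written, the same split would also truncate a bare IPv6 address at its first colon.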
@ -292,7 +293,7 @@ def get_docker_mem_usage_if_limit():

        inactive_file = int(inactive_file)
        return ((usage_in_bytes - inactive_file) / limit_in_bytes) * 100

    except Exception as e:
    except Exception:
        return None
@ -421,3 +422,14 @@ def distinct(seq, key=None):

def is_macos():
    return platform.system() == 'Darwin'


def convert_html_to_markdown(html_str):
    h = html2text.HTML2Text()
    h.body_width = 0
    h.ignore_links = False

    markdown = h.handle(html_str)
    markdown = markdown.replace('\n\n', '\n')
    markdown = markdown.replace('\n ', '\n')
    return markdown
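`convert_html_to_markdown()` builds on the newly imported `html2text` package and then collapses blank lines so notification bodies render compactly. A small usage sketch with an invented HTML fragment (requires `html2text` to be installed):

import html2text

def convert_html_to_markdown(html_str):
    # Same settings as the helper added above.
    h = html2text.HTML2Text()
    h.body_width = 0          # do not hard-wrap long lines
    h.ignore_links = False    # keep <a href> targets as markdown links
    markdown = h.handle(html_str)
    markdown = markdown.replace('\n\n', '\n')
    markdown = markdown.replace('\n ', '\n')
    return markdown

print(convert_html_to_markdown('<p>Password of <b>web-01</b> was changed.</p>'))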
@ -39,10 +39,6 @@ def iso8601_to_unixtime(time_string):

    return to_unixtime(time_string, _ISO8601_FORMAT)


def get_remote_addr(request):
    return request.META.get("HTTP_X_FORWARDED_HOST") or request.META.get("REMOTE_ADDR")


def is_true(value):
    return value in BooleanField.TRUE_VALUES
@ -13,7 +13,13 @@ from common.utils.random import random_string

logger = get_logger(__file__)


@shared_task(verbose_name=_('Send SMS code'))
@shared_task(
    verbose_name=_('Send SMS code'),
    description=_(
        """When resetting a password, forgetting a password, or verifying MFA, this task needs to
        be executed to send SMS messages"""
    )
)
def send_sms_async(target, code):
    SMS().send_verify_code(target, code)
@ -67,5 +67,8 @@

"Version": "Version",
"ViewData": "View data",
"WaitCommandReviewMessage": "The review request has been initiated, please wait for the review results",
"initializingDatasourceFailedMessage": "Connection failed, please check if the database connection configuration is correct"
"initializingDatasourceFailedMessage": "Connection failed, please check if the database connection configuration is correct",
"Warning": "Warning",
"ExecutionCanceled": "Execution Canceled",
"CommandWarningDialogMessage": "The command you executed is risky and an alert notification will be sent to the administrator. Do you want to continue?"
}
@ -9,6 +9,7 @@

"CommandReviewMessage": "入力されたコマンドはレビュー後に実行されます。レビューリクエストを送信しますか?",
"CommandReviewRejectBy": "コマンドレビューが%sに拒否されました",
"CommandReviewTimeoutError": "コマンドレビューがタイムアウトしました",
"CommandWarningDialogMessage": "あなたが実行したコマンドにはリスクがあり、警告通知が管理者に送信されます。続行しますか?",
"Confirm": "確認",
"ConnectError": "接続に失敗しました",
"ConnectSuccess": "接続に成功しました",

@ -22,6 +23,7 @@

"ErrorMessage": "エラーメッセージ",
"ExecuteError": "実行に失敗しました",
"ExecuteSuccess": "実行に成功しました",
"ExecutionCanceled": "実行がキャンセルされました",
"ExportALL": "すべてのデータをエクスポート",
"ExportAll": "すべてエクスポート",
"ExportCurrent": "現在のページをエクスポート",

@ -67,5 +69,6 @@

"Version": "バージョン",
"ViewData": "データを見る",
"WaitCommandReviewMessage": "レビューリクエストが送信されました。レビュー結果をお待ちください",
"Warning": "警告",
"initializingDatasourceFailedMessage": "接続に失敗しました。データベース接続設定が正しいか確認してください"
}
@ -67,5 +67,8 @@

"Version": "版本",
"ViewData": "查看数据",
"WaitCommandReviewMessage": "复核请求已发起, 请等待复核结果",
"initializingDatasourceFailedMessage": "连接失败,请检查数据库连接配置是否正确"
"initializingDatasourceFailedMessage": "连接失败,请检查数据库连接配置是否正确",
"Warning": "警告",
"ExecutionCanceled": "执行已取消",
"CommandWarningDialogMessage": "您执行的命令存在风险,告警通知将发送给管理员。是否继续?"
}
@ -9,6 +9,7 @@

"CommandReviewMessage": "您輸入的命令需要覆核後才可以執行,是否發起覆核請求?",
"CommandReviewRejectBy": "命令覆核被 %s 拒絕",
"CommandReviewTimeoutError": "命令覆核超時",
"CommandWarningDialogMessage": "您進行的動作存在風險,警告通知將會寄給管理員。你確定要繼續嗎?",
"Confirm": "確認",
"ConnectError": "連接失敗",
"ConnectSuccess": "連接成功",

@ -22,6 +23,7 @@

"ErrorMessage": "錯誤消息",
"ExecuteError": "執行失敗",
"ExecuteSuccess": "執行成功",
"ExecutionCanceled": "動作已取消",
"ExportALL": "匯出所有資料",
"ExportAll": "匯出全部",
"ExportCurrent": "匯出當前頁面",

@ -67,5 +69,6 @@

"Version": "版本",
"ViewData": "查看資料",
"WaitCommandReviewMessage": "覆核請求已發起,請等待覆核結果",
"Warning": "警告。",
"initializingDatasourceFailedMessage": "連接失敗,請檢查資料庫連接配置是否正確"
}
Several additional file diffs in this commit were suppressed because they are too large, and some files are not shown because too many files changed.