mirror of https://github.com/jumpserver/jumpserver
feat: Supports saving operate-logs, user-login-logs, password-change-logs, and FTP-logs to ES
commit ece93680f0

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20240815_080231 AS stage-build
FROM jumpserver/core-base:20240913_063833 AS stage-build

ARG VERSION
@@ -43,14 +43,18 @@ RUN set -ex \
WORKDIR /opt/jumpserver

ARG PIP_MIRROR=https://pypi.org/simple
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections

RUN --mount=type=cache,target=/root/.cache,sharing=locked,id=core \
--mount=type=bind,source=poetry.lock,target=poetry.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
--mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
set -ex \
&& python3 -m venv /opt/py3 \
&& pip install poetry -i ${PIP_MIRROR} \
&& poetry config virtualenvs.create false \
&& . /opt/py3/bin/activate \
&& poetry install --only main \
&& ansible-galaxy collection install -r collections.yml --force --ignore-certs \
&& bash clean_site_packages.sh
@@ -4,6 +4,9 @@
ansible_python_interpreter: /opt/py3/bin/python
db_name: "{{ jms_asset.spec_info.db_name }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Test MySQL connection

@@ -13,9 +16,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
register: db_info

@@ -30,9 +33,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"

@@ -47,7 +50,7 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
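The rewritten cert parameters above rely on Ansible's `omit` placeholder: a parameter is dropped from the module call entirely when SSL verification is off or when the corresponding cert string is empty, instead of passing an empty path. A minimal, framework-free Python sketch of that selection logic (variable and function names are illustrative, not part of the commit):

```python
# Hypothetical sketch of the "xxx if check_ssl and xxx | length > 0 else omit"
# expressions above. In Ansible, assigning `omit` removes the parameter from
# the module call entirely.
def build_tls_params(check_ssl, ca_cert="", ssl_cert="", ssl_key=""):
    """Return only the TLS parameters that should be passed to the DB module."""
    if not check_ssl:
        # SSL verification disabled (or invalid certs allowed): pass nothing.
        return {}
    candidates = {"ca_cert": ca_cert, "client_cert": ssl_cert, "client_key": ssl_key}
    # Empty strings behave like Ansible's `omit`: the key is simply not sent.
    return {key: value for key, value in candidates.items() if value}


if __name__ == "__main__":
    print(build_tls_params(True, ca_cert="/tmp/ca.pem"))   # {'ca_cert': '/tmp/ca.pem'}
    print(build_tls_params(False, ca_cert="/tmp/ca.pem"))  # {}
```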
@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Test PostgreSQL connection

@@ -11,6 +15,10 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
register: result
failed_when: not result.is_available

@@ -28,6 +36,10 @@
db: "{{ jms_asset.spec_info.db_name }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
role_attr_flags: LOGIN
ignore_errors: true
when: result is succeeded

@@ -39,3 +51,7 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
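The new `ssl_mode` parameter carries the asset's `pg_ssl_mode` value down to the PostgreSQL tasks alongside the optional CA and client cert files. As a rough sketch of how such settings map onto libpq connection options in a hand-built client (illustrative only; the playbooks go through Ansible's PostgreSQL modules rather than this code, and psycopg2 is assumed to be installed):

```python
# Hypothetical sketch: pg_ssl_mode and the cert paths expressed as libpq
# connection options (requires psycopg2).
import psycopg2

def connect_pg(host, port, db, user, password, ssl_mode="prefer",
               ca_cert=None, ssl_cert=None, ssl_key=None):
    kwargs = dict(host=host, port=port, dbname=db, user=user, password=password,
                  sslmode=ssl_mode)          # prefer / require / verify-ca / verify-full
    if ca_cert:
        kwargs["sslrootcert"] = ca_cert      # CA used to verify the server
    if ssl_cert and ssl_key:
        kwargs["sslcert"] = ssl_cert         # client certificate
        kwargs["sslkey"] = ssl_key           # client private key
    return psycopg2.connect(**kwargs)
```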
@@ -3,6 +3,9 @@
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Get info

@@ -12,9 +15,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: users
register: db_info

@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Get info

@@ -11,6 +15,10 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
filter: "roles"
register: db_info
@@ -4,6 +4,9 @@
ansible_python_interpreter: /opt/py3/bin/python
db_name: "{{ jms_asset.spec_info.db_name }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Test MySQL connection

@@ -13,9 +16,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
register: db_info

@@ -30,9 +33,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"

@@ -47,7 +50,7 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Test PostgreSQL connection

@@ -11,6 +15,10 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
register: result
failed_when: not result.is_available

@@ -28,6 +36,10 @@
db: "{{ jms_asset.spec_info.db_name }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
role_attr_flags: LOGIN
ignore_errors: true
when: result is succeeded

@@ -40,6 +52,10 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
when:
- result is succeeded
- change_info is succeeded
@@ -3,6 +3,9 @@
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: "Remove account"

@@ -12,8 +15,8 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
name: "{{ account.username }}"
state: absent
@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: "Remove account"

@@ -12,4 +16,8 @@
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.spec_info.db_name }}"
name: "{{ account.username }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
state: absent
@@ -3,6 +3,9 @@
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Verify account

@@ -12,7 +15,7 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Verify account

@@ -11,5 +15,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
register: result
failed_when: not result.is_available
@@ -178,7 +178,7 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
instance.save()
return instance, 'updated'
else:
raise serializers.ValidationError('Account already exists')
raise serializers.ValidationError(_('Account already exists'))

def create(self, validated_data):
push_now = validated_data.pop('push_now', None)
@@ -28,8 +28,14 @@ def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):


@shared_task(
queue='ansible', verbose_name=_('Account execute automation'),
activity_callback=task_activity_callback
queue='ansible',
verbose_name=_('Account execute automation'),
activity_callback=task_activity_callback,
description=_(
"""Unified execution entry for account automation tasks: when the system performs tasks
such as account push, password change, account verification, account collection,
and gateway account verification, all tasks are executed through this unified entry"""
)
)
def execute_account_automation_task(pid, trigger, tp):
model = AutomationTypes.get_type_model(tp)
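Across this commit, `@shared_task` declarations gain a human-readable `description` alongside `verbose_name`. Celery sets unknown decorator keyword options as attributes on the generated task object, which is presumably how JumpServer surfaces these strings in its task listings. A minimal sketch, assuming Celery is installed; plain strings stand in for Django's gettext_lazy and the task body is hypothetical:

```python
# Minimal sketch of extra @shared_task options becoming task attributes.
from celery import shared_task

@shared_task(
    verbose_name="Account execute automation",
    description="Unified execution entry for account automation tasks",
)
def example_automation_task(pid, trigger, tp):
    return pid, trigger, tp

# Unrecognized decorator options are attached to the task object:
print(example_automation_task.verbose_name)   # "Account execute automation"
print(example_automation_task.description)    # "Unified execution entry ..."
```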
@@ -54,8 +60,12 @@ def record_task_activity_callback(self, record_ids, *args, **kwargs):


@shared_task(
queue='ansible', verbose_name=_('Execute automation record'),
activity_callback=record_task_activity_callback
queue='ansible',
verbose_name=_('Execute automation record'),
activity_callback=record_task_activity_callback,
description=_(
"""When manually executing password change records, this task is used"""
)
)
def execute_automation_record_task(record_ids, tp):
from accounts.models import ChangeSecretRecord

@@ -84,7 +94,16 @@ def execute_automation_record_task(record_ids, tp):

@shared_task(
verbose_name=_('Clean change secret and push record period'),
description=_('Clean change secret and push record period description')
description=_(
"""The system will periodically clean up unnecessary password change and push records,
including their associated change tasks, execution logs, assets, and accounts. When any
of these associated items are deleted, the corresponding password change and push records
become invalid. Therefore, to maintain a clean and efficient database, the system will
clean up expired records at 2 a.m daily, based on the interval specified by
PERM_EXPIRED_CHECK_PERIODIC in the config.txt configuration file. This periodic cleanup
mechanism helps free up storage space and enhances the security and overall performance
of data management"""
)
)
@register_as_period_task(crontab=CRONTAB_AT_AM_THREE)
def clean_change_secret_and_push_record_period():
@@ -22,7 +22,13 @@ def task_activity_callback(self, pid, trigger, *args, **kwargs):
return resource_ids, org_id


@shared_task(verbose_name=_('Execute account backup plan'), activity_callback=task_activity_callback)
@shared_task(
verbose_name=_('Execute account backup plan'),
activity_callback=task_activity_callback,
description=_(
"When performing scheduled or manual account backups, this task is used"
)
)
def execute_account_backup_task(pid, trigger, **kwargs):
from accounts.models import AccountBackupAutomation
with tmp_to_root_org():
@@ -26,8 +26,10 @@ def gather_asset_accounts_util(nodes, task_name):


@shared_task(
queue="ansible", verbose_name=_('Gather asset accounts'),
activity_callback=lambda self, node_ids, task_name=None, *args, **kwargs: (node_ids, None)
queue="ansible",
verbose_name=_('Gather asset accounts'),
activity_callback=lambda self, node_ids, task_name=None, *args, **kwargs: (node_ids, None),
description=_("Unused")
)
def gather_asset_accounts_task(node_ids, task_name=None):
if task_name is None:
@@ -12,8 +12,12 @@ __all__ = [


@shared_task(
queue="ansible", verbose_name=_('Push accounts to assets'),
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None)
queue="ansible",
verbose_name=_('Push accounts to assets'),
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None),
description=_(
"When creating or modifying an account requires account push, this task is executed"
)
)
def push_accounts_to_assets_task(account_ids, params=None):
from accounts.models import PushAccountAutomation
@@ -21,8 +21,13 @@ __all__ = ['remove_accounts_task']


@shared_task(
queue="ansible", verbose_name=_('Remove account'),
activity_callback=lambda self, gather_account_ids, *args, **kwargs: (gather_account_ids, None)
queue="ansible",
verbose_name=_('Remove account'),
activity_callback=lambda self, gather_account_ids, *args, **kwargs: (gather_account_ids, None),
description=_(
"""When clicking "Sync deletion" in 'Console - Gather Account - Gathered accounts' this
task will be executed"""
)
)
def remove_accounts_task(gather_account_ids):
from accounts.models import GatheredAccount

@@ -41,7 +46,15 @@ def remove_accounts_task(gather_account_ids):
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)


@shared_task(verbose_name=_('Clean historical accounts'))
@shared_task(
verbose_name=_('Clean historical accounts'),
description=_(
"""Each time an asset account is updated, a historical account is generated, so it is
necessary to clean up the asset account history. The system will clean up excess account
records at 2 a.m. daily based on the configuration in the "System settings - Features -
Account storage - Record limit"""
)
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
@tmp_to_root_org()
def clean_historical_accounts():
@@ -9,7 +9,11 @@ from orgs.utils import tmp_to_root_org, tmp_to_org

@shared_task(
verbose_name=_('Template sync info to related accounts'),
activity_callback=lambda self, template_id, *args, **kwargs: (template_id, None)
activity_callback=lambda self, template_id, *args, **kwargs: (template_id, None),
description=_(
"""When clicking 'Sync new secret to accounts' in 'Console - Account - Templates -
Accounts' this task will be executed"""
)
)
def template_sync_related_accounts(template_id, user_id=None):
from accounts.models import Account, AccountTemplate
@@ -28,7 +28,12 @@ def sync_instance(instance):
return "succeeded", msg


@shared_task(verbose_name=_('Sync secret to vault'))
@shared_task(
verbose_name=_('Sync secret to vault'),
description=_(
"When clicking 'Sync' in 'System Settings - Features - Account Storage' this task will be executed"
)
)
def sync_secret_to_vault():
if not vault_client.enabled:
# We cannot check settings.VAULT_ENABLED here; we must check the type of the current vault_client
@@ -46,8 +46,12 @@ def verify_accounts_connectivity_util(accounts, task_name):


@shared_task(
queue="ansible", verbose_name=_('Verify asset account availability'),
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None)
queue="ansible",
verbose_name=_('Verify asset account availability'),
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None),
description=_(
"When clicking 'Test' in 'Console - Asset details - Accounts' this task will be executed"
)
)
def verify_accounts_connectivity_task(account_ids):
from accounts.models import Account, VerifyAccountAutomation
@@ -1,4 +1,5 @@
from django.db.models import Count
from django_filters import rest_framework as filters
from rest_framework import generics
from rest_framework import serializers
from rest_framework.decorators import action

@@ -14,6 +15,14 @@ from common.serializers import GroupedChoiceSerializer
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']


class PlatformFilter(filters.FilterSet):
name__startswith = filters.CharFilter(field_name='name', lookup_expr='istartswith')

class Meta:
model = Platform
fields = ['name', 'category', 'type']


class AssetPlatformViewSet(JMSModelViewSet):
queryset = Platform.objects.all()
serializer_classes = {

@@ -21,7 +30,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
'list': PlatformListSerializer,
'categories': GroupedChoiceSerializer,
}
filterset_fields = ['name', 'category', 'type']
filterset_class = PlatformFilter
search_fields = ['name']
ordering = ['-internal', 'name']
rbac_perms = {
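The new `PlatformFilter` replaces the plain `filterset_fields` list so the platform list API can additionally be filtered by a case-insensitive name prefix (django-filter's `istartswith` lookup), presumably via a query string such as `?name__startswith=Gateway` (illustrative). A framework-free sketch of the matching semantics (data is illustrative only):

```python
# Illustrative only: mimics the case-insensitive prefix matching that
# lookup_expr='istartswith' performs at the database level.
def filter_platforms_by_prefix(platforms, prefix):
    prefix = prefix.lower()
    return [p for p in platforms if p["name"].lower().startswith(prefix)]

platforms = [{"name": "Gateway"}, {"name": "gateway-ha"}, {"name": "Linux"}]
print(filter_platforms_by_prefix(platforms, "Gateway"))
# [{'name': 'Gateway'}, {'name': 'gateway-ha'}]
```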
@@ -170,6 +170,7 @@ class BasePlaybookManager:
result = self.write_cert_to_file(
os.path.join(cert_dir, f), specific.get(f)
)
os.chmod(result, 0o600)
host['jms_asset']['secret_info'][f] = result
return host
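The single functional addition in this hunk is `os.chmod(result, 0o600)`: after certificate material from `secret_info` is written to a file for the playbook run, the file is restricted to owner read/write so other local users cannot read key material. A small stand-alone sketch of the same idea (paths and the helper name are hypothetical):

```python
# Minimal sketch: write secret material to disk with owner-only permissions.
import os
import tempfile

def write_secret_file(content, name, directory=None):
    directory = directory or tempfile.mkdtemp(prefix="certs-")
    path = os.path.join(directory, name)
    with open(path, "w") as f:
        f.write(content)
    os.chmod(path, 0o600)  # rw------- ; mirrors the chmod added in the hunk above
    return path

path = write_secret_file("-----BEGIN CERTIFICATE-----\n...", "ca.pem")
print(oct(os.stat(path).st_mode & 0o777))  # 0o600
```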
@@ -3,6 +3,9 @@
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Get info

@@ -12,9 +15,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
register: db_info
@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Get info

@@ -11,6 +15,10 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
register: db_info

- name: Define info by set_fact
@@ -3,6 +3,9 @@
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Test MySQL connection

@@ -12,7 +15,7 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
@@ -2,6 +2,10 @@
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"

tasks:
- name: Test PostgreSQL connection

@@ -11,5 +15,9 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_db: "{{ jms_asset.spec_info.db_name }}"
ca_cert: "{{ ca_cert if check_ssl and ca_cert | length > 0 else omit }}"
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
register: result
failed_when: not result.is_available
@@ -1,6 +1,7 @@
from .automation import *
from .base import *
from .category import *
from .database import *
from .host import *
from .platform import *
from .protocol import *
@@ -1,3 +1,5 @@
from django.db.models import TextChoices

from .base import BaseType

@@ -120,3 +122,10 @@ class DatabaseTypes(BaseType):
cls.MYSQL, cls.MARIADB, cls.POSTGRESQL,
cls.MONGODB, cls.REDIS,
]


class PostgresqlSSLMode(TextChoices):
PREFER = 'prefer', 'Prefer'
REQUIRE = 'require', 'Require'
VERIFY_CA = 'verify-ca', 'Verify CA'
VERIFY_FULL = 'verify-full', 'Verify Full'
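`PostgresqlSSLMode` mirrors libpq's `sslmode` levels, from opportunistic encryption up to full certificate and hostname verification. A short reference sketch of what each value guarantees (standard libpq semantics, not JumpServer-specific behaviour):

```python
# Reference sketch of the standard libpq sslmode guarantees.
PG_SSL_MODES = {
    "prefer":      "Use SSL if the server supports it, otherwise fall back to plaintext.",
    "require":     "Always use SSL, but do not verify the server certificate.",
    "verify-ca":   "Use SSL and verify the server certificate against the configured CA.",
    "verify-full": "Like verify-ca, and also verify the server hostname matches the cert.",
}

for mode, meaning in PG_SSL_MODES.items():
    print(f"{mode:12s} {meaning}")
```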
@@ -45,6 +45,12 @@ class Protocol(ChoicesMixin, models.TextChoices):
'default': False,
'label': _('Old SSH version'),
'help_text': _('Old SSH version like openssh 5.x or 6.x')
},
'nc': {
'type': 'bool',
'default': False,
'label': 'Netcat (nc)',
'help_text': _('Netcat help text')
}
}
},
@@ -0,0 +1,23 @@
# Generated by Django 4.1.13 on 2024-09-13 08:22

from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
('assets', '0005_myasset'),
]

operations = [
migrations.AddField(
model_name='database',
name='pg_ssl_mode',
field=models.CharField(choices=[
('prefer', 'Prefer'),
('require', 'Require'),
('verify-ca', 'Verify CA'),
('verify-full', 'Verify Full')
], default='prefer',
max_length=16, verbose_name='Postgresql SSL mode'),
),
]
@@ -1,6 +1,7 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from assets.const import PostgresqlSSLMode
from common.db.fields import EncryptTextField
from .common import Asset

@@ -12,6 +13,10 @@ class Database(Asset):
client_cert = EncryptTextField(verbose_name=_("Client cert"), blank=True)
client_key = EncryptTextField(verbose_name=_("Client key"), blank=True)
allow_invalid_cert = models.BooleanField(default=False, verbose_name=_('Allow invalid cert'))
pg_ssl_mode = models.CharField(
max_length=16, choices=PostgresqlSSLMode.choices,
default=PostgresqlSSLMode.PREFER, verbose_name=_('Postgresql SSL mode')
)

def __str__(self):
return '{}({}://{}/{})'.format(self.name, self.type, self.address, self.db_name)
@@ -16,7 +16,7 @@ __all__ = ['Gateway']
class GatewayManager(OrgManager):
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(platform__name=GATEWAY_NAME)
queryset = queryset.filter(platform__name__startswith=GATEWAY_NAME)
return queryset

def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
@@ -31,6 +31,12 @@ __all__ = [
class AssetProtocolsSerializer(serializers.ModelSerializer):
port = serializers.IntegerField(required=False, allow_null=True, max_value=65535, min_value=0)

def get_render_help_text(self):
if self.parent and self.parent.many:
return _('Protocols, format is ["protocol/port"]')
else:
return _('Protocol, format is name/port')

def to_file_representation(self, data):
return '{name}/{port}'.format(**data)

@@ -97,6 +103,9 @@ class AssetAccountSerializer(AccountSerializer):
attrs = super().validate(attrs)
return self.set_secret(attrs)

def get_render_help_text(self):
return _('Accounts, format [{"name": "x", "username": "x", "secret": "x", "secret_type": "password"}]')

class Meta(AccountSerializer.Meta):
fields = [
f for f in AccountSerializer.Meta.fields

@@ -121,12 +130,23 @@ class AccountSecretSerializer(SecretReadableMixin, CommonModelSerializer):
}


class NodeDisplaySerializer(serializers.ListField):
def get_render_help_text(self):
return _('Node path, format ["/org_name/node_name"], if node not exist, will create it')

def to_internal_value(self, data):
return data

def to_representation(self, data):
return data


class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, WritableNestedModelSerializer):
category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
nodes_display = serializers.ListField(read_only=False, required=False, label=_("Node path"))
nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
_accounts = None

class Meta:
@@ -16,9 +16,14 @@ class DatabaseSerializer(AssetSerializer):
model = Database
extra_fields = [
'db_name', 'use_ssl', 'ca_cert', 'client_cert',
'client_key', 'allow_invalid_cert'
'client_key', 'allow_invalid_cert', 'pg_ssl_mode'
]
fields = AssetSerializer.Meta.fields + extra_fields
extra_kwargs = {
'ca_cert': {'help_text': _('CA cert help text')},
'pg_ssl_mode': {'help_text': _('Postgresql ssl model help text')},
}
extra_kwargs.update(AssetSerializer.Meta.extra_kwargs)

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -14,6 +14,11 @@ class GatewaySerializer(HostSerializer):
class Meta(HostSerializer.Meta):
model = Gateway

def validate_platform(self, p):
if not p.name.startswith('Gateway'):
raise serializers.ValidationError(_('The platform must start with Gateway'))
return p

def validate_name(self, value):
queryset = Asset.objects.filter(name=value)
if self.instance:
@@ -21,8 +21,10 @@ def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):


@shared_task(
queue='ansible', verbose_name=_('Asset execute automation'),
activity_callback=task_activity_callback
queue='ansible',
verbose_name=_('Asset execute automation'),
activity_callback=task_activity_callback,
description=_("Unused")
)
def execute_asset_automation_task(pid, trigger, tp):
model = AutomationTypes.get_type_model(tp)
@@ -18,8 +18,13 @@ __all__ = [


@shared_task(
queue="ansible", verbose_name=_('Gather assets facts'),
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id)
queue="ansible",
verbose_name=_('Gather assets facts'),
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id),
description=_(
"""When clicking 'Refresh hardware info' in 'Console - Asset Details - Basic' this task
will be executed"""
)
)
def gather_assets_facts_task(asset_ids, org_id, task_name=None):
from assets.models import GatherFactsAutomation
@@ -1,19 +1,25 @@
from celery import shared_task
from django.utils.translation import gettext_lazy as _

from assets.utils import check_node_assets_amount
from common.const.crontab import CRONTAB_AT_AM_TWO
from common.utils import get_logger
from common.utils.lock import AcquireFailed
from ops.celery.decorator import register_as_period_task
from orgs.models import Organization
from orgs.utils import tmp_to_org
from ops.celery.decorator import register_as_period_task
from assets.utils import check_node_assets_amount

from common.utils.lock import AcquireFailed
from common.utils import get_logger
from common.const.crontab import CRONTAB_AT_AM_TWO

logger = get_logger(__file__)


@shared_task(verbose_name=_('Check the amount of assets under the node'))
@shared_task(
verbose_name=_('Check the amount of assets under the node'),
description=_(
"""Manually verifying asset quantities updates the asset count for nodes under the
current organization. This task will be called in the following two cases: when updating
nodes and when the number of nodes exceeds 100"""
)
)
def check_node_assets_amount_task(org_id=None):
if org_id is None:
orgs = Organization.objects.all()

@@ -30,7 +36,13 @@ def check_node_assets_amount_task(org_id=None):
logger.error(error)


@shared_task(verbose_name=_('Periodic check the amount of assets under the node'))
@shared_task(
verbose_name=_('Periodic check the amount of assets under the node'),
description=_(
"""Schedule the check_node_assets_amount_task to periodically update the asset count of
all nodes under all organizations"""
)
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
def check_node_assets_amount_period_task():
check_node_assets_amount_task()
@@ -17,8 +17,12 @@ __all__ = [


@shared_task(
verbose_name=_('Test assets connectivity'), queue='ansible',
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id)
verbose_name=_('Test assets connectivity'),
queue='ansible',
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id),
description=_(
"When clicking 'Test Asset Connectivity' in 'Asset Details - Basic Settings' this task will be executed"
)
)
def test_assets_connectivity_task(asset_ids, org_id, task_name=None):
from assets.models import PingAutomation
@@ -16,8 +16,12 @@ __all__ = [


@shared_task(
verbose_name=_('Test gateways connectivity'), queue='ansible',
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id)
verbose_name=_('Test gateways connectivity'),
queue='ansible',
activity_callback=lambda self, asset_ids, org_id, *args, **kwargs: (asset_ids, org_id),
description=_(
"When clicking 'Test Connection' in 'Domain Details - Gateway' this task will be executed"
)
)
def test_gateways_connectivity_task(asset_ids, org_id, local_port, task_name=None):
from assets.models import PingAutomation

@@ -33,4 +37,5 @@ def test_gateways_connectivity_task(asset_ids, org_id, local_port, task_name=None):
def test_gateways_connectivity_manual(gateway_ids, local_port):
task_name = gettext_noop("Test gateways connectivity")
gateway_ids = [str(i) for i in gateway_ids]
return test_gateways_connectivity_task.delay(gateway_ids, str(current_org.id), local_port, task_name)
return test_gateways_connectivity_task.delay(gateway_ids, str(current_org.id), local_port,
task_name)
@@ -24,6 +24,7 @@ from .const import (
OperateChoices,
ActionChoices,
ActivityChoices,
LogType,
LoginTypeChoices,
MFAChoices,
LoginStatusChoices,

@@ -350,9 +351,8 @@ class LogStorage(CommonStorageModelMixin, JMSBaseModel):
log_store.pre_use_check()
return True

@staticmethod
def is_use():
return False
def used_by(self):
return [str(LogType(l).label) for l in self.meta.get('LOG_TYPES', [])]

def save(self, *args, **kwargs):
self.is_valid()
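`LogStorage.used_by()` replaces the old static `is_use()` stub: rather than a bare boolean, it reports which log types (operate, login, password-change, FTP logs, and so on) are configured in the instance's `meta` to use this storage. A framework-free sketch of the same mapping (the enum members and labels here are illustrative, not the exact `LogType` definitions):

```python
# Illustrative sketch of meta['LOG_TYPES'] -> human-readable labels.
from enum import Enum

class LogType(str, Enum):          # stand-in for audits.const.LogType
    operate_log = "operate_log"
    login_log = "login_log"
    password_change_log = "password_change_log"
    ftp_log = "ftp_log"

LABELS = {
    LogType.operate_log: "Operate log",
    LogType.login_log: "User login log",
    LogType.password_change_log: "Password change log",
    LogType.ftp_log: "FTP log",
}

def used_by(meta):
    return [LABELS[LogType(t)] for t in meta.get("LOG_TYPES", [])]

print(used_by({"LOG_TYPES": ["operate_log", "ftp_log"]}))
# ['Operate log', 'FTP log']
```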
@@ -130,7 +130,15 @@ def clean_expired_session_period():
logger.info("Clean session replay done")


@shared_task(verbose_name=_('Clean audits session task log'))
@shared_task(
verbose_name=_('Clean audits session task log'),
description=_(
"""Since the system generates login logs, operation logs, file upload logs, activity
logs, Celery execution logs, session recordings, command records, and password change
logs, it will perform cleanup of records that exceed the time limit according to the
'Tasks - Regular clean-up' in the system settings at 2 a.m daily"""
)
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
def clean_audits_log_period():
print("Start clean audit session task log")

@@ -144,7 +152,13 @@ def clean_audits_log_period():
clean_password_change_log_period()


@shared_task(verbose_name=_('Upload FTP file to external storage'))
@shared_task(
verbose_name=_('Upload FTP file to external storage'),
description=_(
"""If SERVER_REPLAY_STORAGE is configured, files uploaded through file management will be
synchronized to external storage"""
)
)
def upload_ftp_file_to_external_storage(ftp_log_id, file_name):
logger.info(f'Start upload FTP file record to external storage: {ftp_log_id} - {file_name}')
ftp_log = get_log_storage(LogType.ftp_log).get_manager().filter(id=ftp_log_id).first()
@@ -1,6 +1,6 @@
# coding:utf-8
#

import abc
import ldap
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist

@@ -15,13 +15,16 @@ from .base import JMSBaseAuthBackend
logger = _LDAPConfig.get_logger()


class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBackend):
"""
Override this class to override _LDAPUser to LDAPUser
"""
@staticmethod
def is_enabled():
return settings.AUTH_LDAP
class LDAPBaseBackend(LDAPBackend):

@abc.abstractmethod
def is_enabled(self):
raise NotImplementedError('is_enabled')

@property
@abc.abstractmethod
def is_user_login_only_in_users(self):
raise NotImplementedError('is_authenticated')

def get_or_build_user(self, username, ldap_user):
"""

@@ -56,38 +59,6 @@ class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBackend):

return user, built

def pre_check(self, username, password):
if not settings.AUTH_LDAP:
error = 'Not enabled auth ldap'
return False, error
if not username:
error = 'Username is None'
return False, error
if not password:
error = 'Password is None'
return False, error
if settings.AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS:
user_model = self.get_user_model()
exist = user_model.objects.filter(username=username).exists()
if not exist:
error = 'user ({}) is not in the user list'.format(username)
return False, error
return True, ''

def authenticate(self, request=None, username=None, password=None, **kwargs):
logger.info('Authentication LDAP backend')
if username is None or password is None:
logger.info('No username or password')
return None
match, msg = self.pre_check(username, password)
if not match:
logger.info('Authenticate failed: {}'.format(msg))
return None
ldap_user = LDAPUser(self, username=username.strip(), request=request)
user = self.authenticate_ldap_user(ldap_user, password)
logger.info('Authenticate user: {}'.format(user))
return user if self.user_can_authenticate(user) else None

def get_user(self, user_id):
user = None
try:
@@ -111,6 +82,67 @@ class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBackend):
user = ldap_user.populate_user()
return user

def authenticate(self, request=None, username=None, password=None, **kwargs):
logger.info('Authentication LDAP backend')
if username is None or password is None:
logger.info('No username or password')
return None
match, msg = self.pre_check(username, password)
if not match:
logger.info('Authenticate failed: {}'.format(msg))
return None
ldap_user = LDAPUser(self, username=username.strip(), request=request)
user = self.authenticate_ldap_user(ldap_user, password)
logger.info('Authenticate user: {}'.format(user))
return user if self.user_can_authenticate(user) else None

def pre_check(self, username, password):
if not self.is_enabled():
error = 'Not enabled auth ldap'
return False, error
if not username:
error = 'Username is None'
return False, error
if not password:
error = 'Password is None'
return False, error
if self.is_user_login_only_in_users:
user_model = self.get_user_model()
exist = user_model.objects.filter(username=username).exists()
if not exist:
error = 'user ({}) is not in the user list'.format(username)
return False, error
return True, ''


class LDAPAuthorizationBackend(JMSBaseAuthBackend, LDAPBaseBackend):
"""
Override this class to override _LDAPUser to LDAPUser
"""

@staticmethod
def is_enabled():
return settings.AUTH_LDAP

@property
def is_user_login_only_in_users(self):
return settings.AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS


class LDAPHAAuthorizationBackend(JMSBaseAuthBackend, LDAPBaseBackend):
"""
Override this class to override _LDAPUser to LDAPUser
"""
settings_prefix = "AUTH_LDAP_HA_"

@staticmethod
def is_enabled():
return settings.AUTH_LDAP_HA

@property
def is_user_login_only_in_users(self):
return settings.AUTH_LDAP_HA_USER_LOGIN_ONLY_IN_USERS


class LDAPUser(_LDAPUser):
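The refactor moves the shared `pre_check`/`authenticate` logic into an abstract `LDAPBaseBackend`; the plain and HA LDAP backends then only declare which settings enable them and whether login is restricted to existing users. A stripped-down sketch of that shape (a dummy `settings` object stands in for `django.conf.settings`, and the real classes also inherit django-auth-ldap's `LDAPBackend`):

```python
# Simplified sketch of the backend split; dummy settings replace django.conf.settings.
import abc
from types import SimpleNamespace

settings = SimpleNamespace(AUTH_LDAP=True, AUTH_LDAP_HA=False)

class LDAPBaseBackend(abc.ABC):
    @abc.abstractmethod
    def is_enabled(self):
        raise NotImplementedError

    def pre_check(self, username, password):
        if not self.is_enabled():
            return False, "Not enabled auth ldap"
        if not username or not password:
            return False, "Username or password is None"
        return True, ""

class LDAPAuthorizationBackend(LDAPBaseBackend):
    def is_enabled(self):
        return settings.AUTH_LDAP

class LDAPHAAuthorizationBackend(LDAPBaseBackend):
    def is_enabled(self):
        return settings.AUTH_LDAP_HA

print(LDAPAuthorizationBackend().pre_check("alice", "secret"))   # (True, '')
print(LDAPHAAuthorizationBackend().pre_check("alice", "secret")) # (False, 'Not enabled auth ldap')
```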
@@ -126,13 +158,18 @@ class LDAPUser(_LDAPUser):
configuration in the settings.py file
is configured with a `lambda` problem value
"""

if isinstance(self.backend, LDAPAuthorizationBackend):
search_filter = settings.AUTH_LDAP_SEARCH_FILTER
search_ou = settings.AUTH_LDAP_SEARCH_OU
else:
search_filter = settings.AUTH_LDAP_HA_SEARCH_FILTER
search_ou = settings.AUTH_LDAP_HA_SEARCH_OU
user_search_union = [
LDAPSearch(
USER_SEARCH, ldap.SCOPE_SUBTREE,
settings.AUTH_LDAP_SEARCH_FILTER
search_filter
)
for USER_SEARCH in str(settings.AUTH_LDAP_SEARCH_OU).split("|")
for USER_SEARCH in str(search_ou).split("|")
]

search = LDAPSearchUnion(*user_search_union)

@@ -169,7 +206,8 @@ class LDAPUser(_LDAPUser):
else:
value = is_true(value)
except LookupError:
logger.warning("{} does not have a value for the attribute {}".format(self.dn, attr))
logger.warning(
"{} does not have a value for the attribute {}".format(self.dn, attr))
else:
if not hasattr(self._user, field):
continue
@@ -2,13 +2,19 @@
#

from celery import shared_task
from ops.celery.decorator import register_as_period_task
from django.contrib.sessions.models import Session
from django.utils import timezone
from django.utils.translation import gettext_lazy as _

from ops.celery.decorator import register_as_period_task

@shared_task(verbose_name=_('Clean expired session'))

@shared_task(
verbose_name=_('Clean expired session'),
description=_(
"Since user logins create sessions, the system will clean up expired sessions every 24 hours"
)
)
@register_as_period_task(interval=3600 * 24)
def clean_django_sessions():
Session.objects.filter(expire_date__lt=timezone.now()).delete()
@@ -38,9 +38,15 @@ class SuggestionMixin:
class RenderToJsonMixin:
@action(methods=[POST, PUT], detail=False, url_path='render-to-json')
def render_to_json(self, request: Request, *args, **kwargs):
rows = request.data
if rows and isinstance(rows[0], dict):
first = list(rows[0].values())[0]
if first.startswith('#Help'):
rows.pop(0)

data = {
'title': (),
'data': request.data,
'data': rows,
}

jms_context = getattr(request, 'jms_context', {})
@@ -119,8 +119,6 @@ class BaseFileParser(BaseParser):
value = field.to_file_internal_value(value)
elif isinstance(field, serializers.BooleanField):
value = value.lower() in ['true', '1', 'yes']
elif isinstance(field, serializers.ChoiceField):
value = value
elif isinstance(field, ObjectRelatedField):
if field.many:
value = [self.id_name_to_obj(v) for v in value]

@@ -164,6 +162,15 @@ class BaseFileParser(BaseParser):
data.append(row_data)
return data

@staticmethod
def pop_help_text_if_need(rows):
rows = list(rows)
if not rows:
return rows
if rows[0][0] == '#Help':
rows.pop(0)
return rows

def parse(self, stream, media_type=None, parser_context=None):
assert parser_context is not None, '`parser_context` should not be `None`'

@@ -192,6 +199,7 @@ class BaseFileParser(BaseParser):
request.jms_context = {}
request.jms_context['column_title_field_pairs'] = column_title_field_pairs

rows = self.pop_help_text_if_need(rows)
data = self.generate_data(field_names, rows)
return data
except Exception as e:
@@ -5,12 +5,13 @@ from datetime import datetime

import pyzipper
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from rest_framework.renderers import BaseRenderer
from rest_framework.utils import encoders, json

from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from common.serializers import fields as common_fields
from common.utils import get_logger

logger = get_logger(__file__)

@@ -38,8 +39,10 @@ class BaseFileRenderer(BaseRenderer):
filename_prefix = serializer.Meta.model.__name__.lower()
else:
filename_prefix = 'download'
now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
filename = "{}_{}.{}".format(filename_prefix, now, self.format)
suffix = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
if self.template == 'import':
suffix = 'template'
filename = "{}_{}.{}".format(filename_prefix, suffix, self.format)
disposition = 'attachment; filename="{}"'.format(filename)
response['Content-Disposition'] = disposition
@ -105,10 +108,10 @@ class BaseFileRenderer(BaseRenderer):
|
|||
value = field.to_file_representation(value)
|
||||
elif isinstance(value, bool):
|
||||
value = 'Yes' if value else 'No'
|
||||
elif isinstance(field, LabeledChoiceField):
|
||||
elif isinstance(field, common_fields.LabeledChoiceField):
|
||||
value = value or {}
|
||||
value = '{}({})'.format(value.get('label'), value.get('value'))
|
||||
elif isinstance(field, ObjectRelatedField):
|
||||
elif isinstance(field, common_fields.ObjectRelatedField):
|
||||
if field.many:
|
||||
value = [self.to_id_name(v) for v in value]
|
||||
else:
|
||||
|
@@ -126,6 +129,53 @@ class BaseFileRenderer(BaseRenderer):
            value = json.dumps(value, cls=encoders.JSONEncoder, ensure_ascii=False)
        return str(value)

    def get_field_help_text(self, field):
        text = ''
        if hasattr(field, 'get_render_help_text'):
            text = field.get_render_help_text()
        elif isinstance(field, serializers.BooleanField):
            text = _('Yes/No')
        elif isinstance(field, serializers.CharField):
            if field.max_length:
                text = _('Text, max length {}').format(field.max_length)
            else:
                text = _("Long text, no length limit")
        elif isinstance(field, serializers.IntegerField):
            text = _('Number, min {} max {}').format(field.min_value, field.max_value)
            text = text.replace('min None', '').replace('max None', '')
        elif isinstance(field, serializers.DateTimeField):
            text = _('Datetime format {}').format(timezone.now().strftime(settings.REST_FRAMEWORK['DATETIME_FORMAT']))
        elif isinstance(field, serializers.IPAddressField):
            text = _('IP')
        elif isinstance(field, serializers.ChoiceField):
            choices = [str(v) for v in field.choices.keys()]
            if isinstance(field, common_fields.LabeledChoiceField):
                text = _("Choices, format name(value), name is optional for human read,"
                         " value is requisite, options {}").format(','.join(choices))
            else:
                text = _("Choices, options {}").format(",".join(choices))
        elif isinstance(field, common_fields.PhoneField):
            text = _("Phone number, format +8612345678901")
        elif isinstance(field, common_fields.LabeledChoiceField):
            text = _('Label, format ["key:value"]')
        elif isinstance(field, common_fields.ObjectRelatedField):
            text = _("Object, format name(id), name is optional for human read, id is requisite")
        elif isinstance(field, serializers.PrimaryKeyRelatedField):
            text = _('Object, format id')
        elif isinstance(field, serializers.ManyRelatedField):
            child_relation_class_name = field.child_relation.__class__.__name__
            if child_relation_class_name == "ObjectRelatedField":
                text = _('Objects, format ["name(id)", ...], name is optional for human read, id is requisite')
            elif child_relation_class_name == "LabelRelatedField":
                text = _('Labels, format ["key:value", ...], if label not exists, will create it')
            else:
                text = _('Objects, format ["id", ...]')
        elif isinstance(field, serializers.ListSerializer):
            child = field.child
            if hasattr(child, 'get_render_help_text'):
                text = child.get_render_help_text()
        return text

    def generate_rows(self, data, render_fields):
        for item in data:
            row = []
@@ -135,6 +185,17 @@ class BaseFileRenderer(BaseRenderer):
                row.append(value)
            yield row

    def write_help_text_if_need(self):
        if self.template == 'export':
            return
        fields = self.get_rendered_fields()
        row = []
        for f in fields:
            text = self.get_field_help_text(f)
            row.append(text)
        row[0] = '#Help ' + str(row[0])
        self.write_row(row)

    @abc.abstractmethod
    def initial_writer(self):
        raise NotImplementedError

@@ -184,6 +245,7 @@ class BaseFileRenderer(BaseRenderer):
        rows = self.generate_rows(data, rendered_fields)
        self.initial_writer()
        self.write_column_titles(column_titles)
        self.write_help_text_if_need()
        self.write_rows(rows)
        self.after_render()
        value = self.get_rendered_value()
@@ -2,17 +2,17 @@
#

import codecs

import unicodecsv
from six import BytesIO

from .base import BaseFileRenderer
from ..const import CSV_FILE_ESCAPE_CHARS


class CSVFileRenderer(BaseFileRenderer):

class CSVFileRenderer(BaseFileRenderer):
    media_type = 'text/csv'
    format = 'csv'

    writer = None
    buffer = None
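The new import of CSV_FILE_ESCAPE_CHARS suggests the CSV renderer now escapes cell values that a spreadsheet could interpret as formulas. A minimal illustrative sketch, assuming CSV_FILE_ESCAPE_CHARS is a tuple of risky leading characters such as ('=', '+', '-', '@') (its actual value is not shown in this diff):

# Hypothetical sketch only; the real escaping lives in the renderer/const modules.
CSV_FILE_ESCAPE_CHARS = ('=', '+', '-', '@')

def escape_csv_cell(value: str) -> str:
    # Prefix a quote so spreadsheet apps treat the cell as text rather than a formula.
    if value and value.startswith(CSV_FILE_ESCAPE_CHARS):
        return "'" + value
    return value

print(escape_csv_cell('=HYPERLINK("http://evil.example")'))  # -> '=HYPERLINK("http://evil.example")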
@@ -53,8 +53,9 @@ class StorageDestroyModelMixin(DestroyModelMixin):
    def perform_destroy(self, instance):
        if instance.type_null_or_server or instance.is_default:
            raise JMSException(detail=_('Deleting the default storage is not allowed'))
        if instance.is_use():
            raise JMSException(detail=_('Cannot delete storage that is being used'))
        if used_by := instance.used_by():
            names = ', '.join(list(used_by))
            raise JMSException(detail=_('Cannot delete storage that is being used: {}').format(names))
        return super().perform_destroy(instance)
@@ -1,10 +1,10 @@
import os

import jms_storage
from celery import shared_task
from django.conf import settings
from django.core.mail import send_mail, EmailMultiAlternatives, get_connection
from django.utils.translation import gettext_lazy as _
import jms_storage

from .utils import get_logger

@@ -28,7 +28,13 @@ def task_activity_callback(self, subject, message, recipient_list, *args, **kwar
    return resource_ids,


@shared_task(verbose_name=_("Send email"), activity_callback=task_activity_callback)
@shared_task(
    verbose_name=_("Send email"),
    activity_callback=task_activity_callback,
    description=_(
        "This task will be executed when sending email notifications"
    )
)
def send_mail_async(*args, **kwargs):
    """ Using celery to send email async

@@ -55,7 +61,14 @@ def send_mail_async(*args, **kwargs):
        logger.error("Sending mail error: {}".format(e))


@shared_task(verbose_name=_("Send email attachment"), activity_callback=task_activity_callback)
@shared_task(
    verbose_name=_("Send email attachment"),
    activity_callback=task_activity_callback,
    description=_(
        """When an account password is changed or an account backup generates attachments,
        this task needs to be executed for sending emails and handling attachments"""
    )
)
def send_mail_attachment_async(subject, message, recipient_list, attachment_list=None):
    if attachment_list is None:
        attachment_list = []

@@ -77,7 +90,12 @@ def send_mail_attachment_async(subject, message, recipient_list, attachment_list
        logger.error("Sending mail attachment error: {}".format(e))


@shared_task(verbose_name=_('Upload session replay to external storage'))
@shared_task(
    verbose_name=_('Upload account backup to external storage'),
    description=_(
        "When performing an account backup, this task needs to be executed to external storage (SFTP)"
    )
)
def upload_backup_to_obj_storage(recipient, upload_file):
    logger.info(f'Start upload file : {upload_file}')
    remote_path = os.path.join('account_backup', os.path.basename(upload_file))
@@ -427,7 +427,7 @@ def is_macos():
def convert_html_to_markdown(html_str):
    h = html2text.HTML2Text()
    h.body_width = 0
    h.ignore_links = True
    h.ignore_links = False

    markdown = h.handle(html_str)
    markdown = markdown.replace('\n\n', '\n')
@@ -13,7 +13,13 @@ from common.utils.random import random_string
logger = get_logger(__file__)


@shared_task(verbose_name=_('Send SMS code'))
@shared_task(
    verbose_name=_('Send SMS code'),
    description=_(
        """When resetting a password, forgetting a password, or verifying MFA, this task needs to
        be executed to send SMS messages"""
    )
)
def send_sms_async(target, code):
    SMS().send_verify_code(target, code)
@ -67,5 +67,8 @@
|
|||
"Version": "Version",
|
||||
"ViewData": "View data",
|
||||
"WaitCommandReviewMessage": "The review request has been initiated, please wait for the review results",
|
||||
"initializingDatasourceFailedMessage": "Connection failed, please check if the database connection configuration is correct"
|
||||
"initializingDatasourceFailedMessage": "Connection failed, please check if the database connection configuration is correct",
|
||||
"Warning": "Warning",
|
||||
"ExecutionCanceled": "Execution Canceled",
|
||||
"CommandWarningDialogMessage": "The command you executed is risky and an alert notification will be sent to the administrator. Do you want to continue?"
|
||||
}
|
|
@ -9,6 +9,7 @@
|
|||
"CommandReviewMessage": "入力されたコマンドはレビュー後に実行されます。レビューリクエストを送信しますか?",
|
||||
"CommandReviewRejectBy": "コマンドレビューが%sに拒否されました",
|
||||
"CommandReviewTimeoutError": "コマンドレビューがタイムアウトしました",
|
||||
"CommandWarningDialogMessage": "あなたが実行したコマンドにはリスクがあり、警告通知が管理者に送信されます。続行しますか?",
|
||||
"Confirm": "確認",
|
||||
"ConnectError": "接続に失敗しました",
|
||||
"ConnectSuccess": "接続に成功しました",
|
||||
|
@ -22,6 +23,7 @@
|
|||
"ErrorMessage": "エラーメッセージ",
|
||||
"ExecuteError": "実行に失敗しました",
|
||||
"ExecuteSuccess": "実行に成功しました",
|
||||
"ExecutionCanceled": "実行がキャンセルされました",
|
||||
"ExportALL": "すべてのデータをエクスポート",
|
||||
"ExportAll": "すべてエクスポート",
|
||||
"ExportCurrent": "現在のページをエクスポート",
|
||||
|
@ -67,5 +69,6 @@
|
|||
"Version": "バージョン",
|
||||
"ViewData": "データを見る",
|
||||
"WaitCommandReviewMessage": "レビューリクエストが送信されました。レビュー結果をお待ちください",
|
||||
"Warning": "警告",
|
||||
"initializingDatasourceFailedMessage": "接続に失敗しました。データベース接続設定が正しいか確認してください"
|
||||
}
|
|
@ -67,5 +67,8 @@
|
|||
"Version": "版本",
|
||||
"ViewData": "查看数据",
|
||||
"WaitCommandReviewMessage": "复核请求已发起, 请等待复核结果",
|
||||
"initializingDatasourceFailedMessage": "连接失败,请检查数据库连接配置是否正确"
|
||||
"initializingDatasourceFailedMessage": "连接失败,请检查数据库连接配置是否正确",
|
||||
"Warning": "警告",
|
||||
"ExecutionCanceled": "执行已取消",
|
||||
"CommandWarningDialogMessage": "您执行的命令存在风险,告警通知将发送给管理员。是否继续?"
|
||||
}
|
|
@ -9,6 +9,7 @@
|
|||
"CommandReviewMessage": "您輸入的命令需要覆核後才可以執行,是否發起覆核請求?",
|
||||
"CommandReviewRejectBy": "命令覆核被 %s 拒絕",
|
||||
"CommandReviewTimeoutError": "命令覆核超時",
|
||||
"CommandWarningDialogMessage": "您進行的動作存在風險,警告通知將會寄給管理員。你確定要繼續嗎?",
|
||||
"Confirm": "確認",
|
||||
"ConnectError": "連接失敗",
|
||||
"ConnectSuccess": "連接成功",
|
||||
|
@ -22,6 +23,7 @@
|
|||
"ErrorMessage": "錯誤消息",
|
||||
"ExecuteError": "執行失敗",
|
||||
"ExecuteSuccess": "執行成功",
|
||||
"ExecutionCanceled": "動作已取消",
|
||||
"ExportALL": "匯出所有資料",
|
||||
"ExportAll": "匯出全部",
|
||||
"ExportCurrent": "匯出當前頁面",
|
||||
|
@ -67,5 +69,6 @@
|
|||
"Version": "版本",
|
||||
"ViewData": "查看資料",
|
||||
"WaitCommandReviewMessage": "覆核請求已發起,請等待覆核結果",
|
||||
"Warning": "警告。",
|
||||
"initializingDatasourceFailedMessage": "連接失敗,請檢查資料庫連接配置是否正確"
|
||||
}
|
[4 file diffs suppressed because they are too large]
|
@ -1,20 +1,28 @@
|
|||
{
|
||||
"ActionPerm": "Actions",
|
||||
"Cancel": "Cancel",
|
||||
"Confirm": "Confirm",
|
||||
"ConfirmBtn": "Confirm",
|
||||
"Connect": "Connect",
|
||||
"CopyLink": "Copy Link Address and Code",
|
||||
"CopyShareURLSuccess": "Copy Share URL Success",
|
||||
"CreateLink": "Create Share Link",
|
||||
"CreateSuccess": "Success",
|
||||
"DownArrow": "Down arrow",
|
||||
"Download": "Download",
|
||||
"DownloadSuccess": "Download success",
|
||||
"EndFileTransfer": "File transfer end",
|
||||
"ExceedTransferSize": "exceed max transfer size",
|
||||
"Expand": "Expand",
|
||||
"ExpiredTime": "Expired",
|
||||
"GetShareUser": "Enter username",
|
||||
"Hotkeys": "Hotkeys",
|
||||
"InputVerifyCode": "Input Verify Code",
|
||||
"JoinShare": "Join Session",
|
||||
"JoinedWithSuccess": "Successfully joined",
|
||||
"KubernetesManagement": "Kubernetes management",
|
||||
"LeaveShare": "Leave Session",
|
||||
"LeftArrow": "Left arrow",
|
||||
"LinkAddr": "Link",
|
||||
"Minute": "Minute",
|
||||
"Minutes": "Minutes",
|
||||
|
@ -22,13 +30,16 @@
|
|||
"MustSelectOneFile": "Must select one file",
|
||||
"NoLink": "No Link",
|
||||
"OnlineUsers": "Online Users",
|
||||
"Paste": "Paste",
|
||||
"PauseSession": "Pause Session",
|
||||
"ReadOnly": "Read-Only",
|
||||
"Refresh": "Refresh",
|
||||
"Remove": "Remove",
|
||||
"Confirm": "Confirm",
|
||||
"RemoveShareUser": "You have been removed from the shared session.",
|
||||
"RemoveShareUserConfirm": "Are you sure to remove the user from the shared session?",
|
||||
"ResumeSession": "Resume Session",
|
||||
"RightArrow": "Right arrow",
|
||||
"Search": "Search",
|
||||
"SelectAction": "Select",
|
||||
"SelectTheme": "Select Theme",
|
||||
"Self": "Self",
|
||||
|
@ -42,6 +53,7 @@
|
|||
"Theme": "Theme",
|
||||
"ThemeColors": "Theme Colors",
|
||||
"ThemeConfig": "Theme",
|
||||
"UpArrow": "Up arrow",
|
||||
"Upload": "Upload",
|
||||
"UploadSuccess": "Upload success",
|
||||
"UploadTips": "Drag file here or click to upload",
|
||||
|
@ -49,8 +61,11 @@
|
|||
"User": "User",
|
||||
"VerifyCode": "Verify Code",
|
||||
"WaitFileTransfer": "Wait file transfer to finish",
|
||||
"WebSocketClosed": "WebSocket closed",
|
||||
"Writable": "Writable",
|
||||
"JoinedWithSuccess": "Successfully joined",
|
||||
"KubernetesManagement": "Kubernetes management",
|
||||
"WebSocketClosed": "WebSocket closed"
|
||||
}
|
||||
"Reconnect": "Reconnect",
|
||||
"Close Current Tab": "Close Current Tab",
|
||||
"Close All Tabs": "Close All Tabs",
|
||||
"Clone Connect": "Clone Connect",
|
||||
"Custom Setting": "Custom Setting"
|
||||
}
|
|
@ -1,20 +1,32 @@
|
|||
{
|
||||
"ActionPerm": "アクション権限",
|
||||
"Cancel": "キャンセル",
|
||||
"Clone Connect": "ウィンドウをコピー",
|
||||
"Close All Tabs": "すべてを閉じる",
|
||||
"Close Current Tab": "現在を閉じる",
|
||||
"Confirm": "確認",
|
||||
"ConfirmBtn": "確定",
|
||||
"Connect": "接続",
|
||||
"CopyLink": "リンクと認証コードのコピー",
|
||||
"CopyShareURLSuccess": "レプリケーション共有住所成功",
|
||||
"CreateLink": "シェアリンクの作成",
|
||||
"CreateSuccess": "作成に成功しました",
|
||||
"Custom Setting": "カスタム設定",
|
||||
"DownArrow": "下向き矢印",
|
||||
"Download": "ダウンロード",
|
||||
"DownloadSuccess": "ダウンロードに成功しました",
|
||||
"EndFileTransfer": "ファイル転送終了",
|
||||
"ExceedTransferSize": "最大転送サイズを超えています",
|
||||
"Expand": "展開",
|
||||
"ExpiredTime": "有効期限",
|
||||
"GetShareUser": "ユーザー名の入力",
|
||||
"Hotkeys": "ショートカットキー",
|
||||
"InputVerifyCode": "認証コードを入力してください",
|
||||
"JoinShare": "共有セッションに参加",
|
||||
"JoinedWithSuccess": "正常に参加しました",
|
||||
"KubernetesManagement": "Kubernetes 管理",
|
||||
"LeaveShare": "共有セッションから退出",
|
||||
"LeftArrow": "戻る矢印",
|
||||
"LinkAddr": "リンク先",
|
||||
"Minute": "分間",
|
||||
"Minutes": "分間",
|
||||
|
@ -22,12 +34,17 @@
|
|||
"MustSelectOneFile": "ファイルを選択する必要があります",
|
||||
"NoLink": "住所なし",
|
||||
"OnlineUsers": "オンラインスタッフ",
|
||||
"Paste": "貼り付け",
|
||||
"PauseSession": "セッションを一時停止",
|
||||
"ReadOnly": "読み取り専用",
|
||||
"Reconnect": "再接続",
|
||||
"Refresh": "リフレッシュ",
|
||||
"Remove": "削除",
|
||||
"RemoveShareUser": "あなたはすでに共有セッションから削除されました」という意味です",
|
||||
"RemoveShareUserConfirm": "共有セッションから削除してもよろしいですか?",
|
||||
"ResumeSession": "セッションを再開",
|
||||
"RightArrow": "進む矢印",
|
||||
"Search": "検索",
|
||||
"SelectAction": "選択してください",
|
||||
"SelectTheme": "テーマを選択してください",
|
||||
"Self": "自分",
|
||||
|
@ -41,6 +58,7 @@
|
|||
"Theme": "テーマ",
|
||||
"ThemeColors": "テーマカラー",
|
||||
"ThemeConfig": "テーマ",
|
||||
"UpArrow": "上向き矢印",
|
||||
"Upload": "アップロード",
|
||||
"UploadSuccess": "アップロード成功",
|
||||
"UploadTips": "ファイルをここにドラッグするか、アップロードをクリックします",
|
||||
|
@ -48,8 +66,6 @@
|
|||
"User": "ユーザー",
|
||||
"VerifyCode": "認証コード",
|
||||
"WaitFileTransfer": "ファイル転送終了待ち",
|
||||
"Writable": "書き込み可能",
|
||||
"JoinedWithSuccess": "正常に参加しました",
|
||||
"KubernetesManagement": "Kubernetes 管理",
|
||||
"WebSocketClosed": "WebSocket 閉店"
|
||||
"WebSocketClosed": "WebSocket 閉店",
|
||||
"Writable": "書き込み可能"
|
||||
}
|
|
@ -1,21 +1,28 @@
|
|||
{
|
||||
"ActionPerm": "操作权限",
|
||||
"Cancel": "取消",
|
||||
"Confirm": "确认",
|
||||
"ConfirmBtn": "确定",
|
||||
"Connect": "连接",
|
||||
"CopyLink": "复制链接及验证码",
|
||||
"CopyShareURLSuccess": "复制分享地址成功",
|
||||
"CreateLink": "创建分享链接",
|
||||
"CreateSuccess": "创建成功",
|
||||
"Confirm": "确认",
|
||||
"DownArrow": "向下箭头",
|
||||
"Download": "下载",
|
||||
"DownloadSuccess": "下载成功",
|
||||
"EndFileTransfer": "文件传输结束",
|
||||
"ExceedTransferSize": "超过最大传输大小",
|
||||
"Expand": "展开",
|
||||
"ExpiredTime": "有效期限",
|
||||
"GetShareUser": "输入用户名",
|
||||
"Hotkeys": "快捷键",
|
||||
"InputVerifyCode": "请输入验证码",
|
||||
"JoinShare": "加入共享",
|
||||
"JoinedWithSuccess": "已成功加入",
|
||||
"KubernetesManagement": "Kubernetes 管理",
|
||||
"LeaveShare": "离开共享",
|
||||
"LeftArrow": "后退箭头",
|
||||
"LinkAddr": "链接地址",
|
||||
"Minute": "分钟",
|
||||
"Minutes": "分钟",
|
||||
|
@ -23,12 +30,16 @@
|
|||
"MustSelectOneFile": "必须选择一个文件",
|
||||
"NoLink": "无地址",
|
||||
"OnlineUsers": "在线人员",
|
||||
"Paste": "粘贴",
|
||||
"PauseSession": "暂停此会话",
|
||||
"ReadOnly": "只读",
|
||||
"Refresh": "刷新",
|
||||
"Remove": "移除",
|
||||
"RemoveShareUser": "你已经被移除共享会话",
|
||||
"RemoveShareUserConfirm": "确定要移除该用户吗?",
|
||||
"ResumeSession": "恢复此会话",
|
||||
"RightArrow": "前进箭头",
|
||||
"Search": "搜索",
|
||||
"SelectAction": "请选择",
|
||||
"SelectTheme": "请选择主题",
|
||||
"Self": "我",
|
||||
|
@ -42,6 +53,7 @@
|
|||
"Theme": "主题",
|
||||
"ThemeColors": "主题颜色",
|
||||
"ThemeConfig": "主题",
|
||||
"UpArrow": "向上箭头",
|
||||
"Upload": "上传",
|
||||
"UploadSuccess": "上传成功",
|
||||
"UploadTips": "将文件拖到此处,或点击上传",
|
||||
|
@ -49,8 +61,10 @@
|
|||
"User": "用户",
|
||||
"VerifyCode": "验证码",
|
||||
"WaitFileTransfer": "等待文件传输结束",
|
||||
"Writable": "读写",
|
||||
"JoinedWithSuccess": "已成功加入",
|
||||
"KubernetesManagement": "Kubernetes 管理",
|
||||
"WebSocketClosed": "WebSocket 已关闭"
|
||||
}
|
||||
"WebSocketClosed": "WebSocket 已关闭",
|
||||
"Reconnect": "重新连接",
|
||||
"Close Current Tab": "关闭当前",
|
||||
"Close All Tabs": "关闭所有",
|
||||
"Clone Connect": "复制窗口",
|
||||
"Custom Setting": "自定义设置"
|
||||
}
|
|
@ -1,20 +1,32 @@
|
|||
{
|
||||
"ActionPerm": "操作權限",
|
||||
"Cancel": "取消",
|
||||
"Clone Connect": "複製視窗",
|
||||
"Close All Tabs": "關閉全部",
|
||||
"Close Current Tab": "關閉當前",
|
||||
"Confirm": "確認",
|
||||
"ConfirmBtn": "確定",
|
||||
"Connect": "連接",
|
||||
"CopyLink": "複製連結及驗證碼",
|
||||
"CopyShareURLSuccess": "複製分享地址成功",
|
||||
"CreateLink": "創建分享連結",
|
||||
"CreateSuccess": "創建成功",
|
||||
"Custom Setting": "自訂設定",
|
||||
"DownArrow": "向下箭頭",
|
||||
"Download": "下載",
|
||||
"DownloadSuccess": "下載成功",
|
||||
"EndFileTransfer": "文件傳輸結束",
|
||||
"ExceedTransferSize": "超過最大傳輸大小",
|
||||
"Expand": "展開",
|
||||
"ExpiredTime": "有效期限",
|
||||
"GetShareUser": "輸入使用者名稱",
|
||||
"Hotkeys": "快速鍵",
|
||||
"InputVerifyCode": "請輸入驗證碼",
|
||||
"JoinShare": "加入共享",
|
||||
"JoinedWithSuccess": "已成功加入",
|
||||
"KubernetesManagement": "Kubernetes 管理",
|
||||
"LeaveShare": "離開共享",
|
||||
"LeftArrow": "後退箭頭",
|
||||
"LinkAddr": "連結地址",
|
||||
"Minute": "分鐘",
|
||||
"Minutes": "分鐘",
|
||||
|
@ -22,12 +34,17 @@
|
|||
"MustSelectOneFile": "必須選擇一個文件",
|
||||
"NoLink": "無地址",
|
||||
"OnlineUsers": "在線人員",
|
||||
"Paste": "貼上",
|
||||
"PauseSession": "暫停此會話",
|
||||
"ReadOnly": "只讀",
|
||||
"Reconnect": "重新連線",
|
||||
"Refresh": "刷新",
|
||||
"Remove": "移除",
|
||||
"RemoveShareUser": "你已經被移除共享會話",
|
||||
"RemoveShareUserConfirm": "確定要移除該用戶嗎?",
|
||||
"ResumeSession": "恢復此會話",
|
||||
"RightArrow": "前進箭頭",
|
||||
"Search": "搜尋",
|
||||
"SelectAction": "請選擇",
|
||||
"SelectTheme": "請選擇主題",
|
||||
"Self": "我",
|
||||
|
@ -41,6 +58,7 @@
|
|||
"Theme": "主題",
|
||||
"ThemeColors": "主題顏色",
|
||||
"ThemeConfig": "主題",
|
||||
"UpArrow": "向上箭頭",
|
||||
"Upload": "上傳",
|
||||
"UploadSuccess": "上傳成功",
|
||||
"UploadTips": "將文件拖到此處,或點擊上傳",
|
||||
|
@ -48,8 +66,6 @@
|
|||
"User": "用戶",
|
||||
"VerifyCode": "驗證碼",
|
||||
"WaitFileTransfer": "等待文件傳輸結束",
|
||||
"Writable": "讀寫",
|
||||
"JoinedWithSuccess": "已成功加入",
|
||||
"KubernetesManagement": "Kubernetes 管理",
|
||||
"WebSocketClosed": "WebSocket 已關閉"
|
||||
"WebSocketClosed": "WebSocket 已關閉",
|
||||
"Writable": "讀寫"
|
||||
}
|
|
@ -67,8 +67,9 @@
|
|||
"AddUserGroupToThisPermission": "Add user groups",
|
||||
"AddUserToThisPermission": "Add users",
|
||||
"Address": "Address",
|
||||
"AdhocCreate": "Create the command",
|
||||
"AdhocDetail": "Command details",
|
||||
"AdhocManage": "Command",
|
||||
"AdhocManage": "Script",
|
||||
"AdhocUpdate": "Update the command",
|
||||
"Advanced": "Advanced settings",
|
||||
"AfterChange": "After changes",
|
||||
|
@ -116,6 +117,7 @@
|
|||
"ApprovaLevel": "Approval information",
|
||||
"ApprovalLevel": "Approval level",
|
||||
"ApprovalProcess": "Approval process",
|
||||
"ApprovalSelected": "Batch approval",
|
||||
"Approved": "Agreed",
|
||||
"ApproverNumbers": "Approvers",
|
||||
"ApsaraStack": "Alibaba private cloud",
|
||||
|
@ -543,6 +545,7 @@
|
|||
"Gateway": "Gateway",
|
||||
"GatewayCreate": "Create gateway",
|
||||
"GatewayList": "Gateways",
|
||||
"GatewayPlatformHelpText": "Only platforms with names starting with ‘Gateway’ can be used as gateways.",
|
||||
"GatewayUpdate": "Update the gateway",
|
||||
"GatherAccounts": "Gather accounts",
|
||||
"GatherAccountsHelpText": "Collect account information on assets. the collected account information can be imported into the system for centralized management.",
|
||||
|
@ -995,6 +998,7 @@
|
|||
"Resume": "Recovery",
|
||||
"ResumeTaskSendSuccessMsg": "Recovery task issued, please refresh later",
|
||||
"Retry": "Retry",
|
||||
"RetrySelected": "Retry selected",
|
||||
"Reviewer": "Approvers",
|
||||
"Role": "Role",
|
||||
"RoleCreate": "Create role",
|
||||
|
@ -1021,6 +1025,7 @@
|
|||
"RunasHelpText": "Enter username for running script",
|
||||
"RunasPolicy": "Account policy",
|
||||
"RunasPolicyHelpText": "When there are no users currently running on the asset, what account selection strategy should be adopted. skip: do not execute. prioritize privileged accounts: if there are privileged accounts, select them first; if not, select regular accounts. only privileged accounts: select only from privileged accounts; if none exist, do not execute.",
|
||||
"Running": "Running",
|
||||
"RunningPath": "Running path",
|
||||
"RunningPathHelpText": "Enter the run path of the script, this setting only applies to shell scripts",
|
||||
"RunningTimes": "Last 5 run times",
|
||||
|
@ -1037,7 +1042,6 @@
|
|||
"SameAccount": "Same account",
|
||||
"SameAccountTip": "Account with the same username as authorized users",
|
||||
"SameTypeAccountTip": "An account with the same username and key type already exists",
|
||||
"Share": "Share",
|
||||
"Saturday": "Sat",
|
||||
"Save": "Save",
|
||||
"SaveAdhoc": "Save command",
|
||||
|
@ -1087,6 +1091,7 @@
|
|||
"SessionData": "Session data",
|
||||
"SessionDetail": "Session details",
|
||||
"SessionID": "Session id",
|
||||
"SessionJoinRecords": "collaboration records",
|
||||
"SessionList": "Asset sessions",
|
||||
"SessionMonitor": "Monitor",
|
||||
"SessionOffline": "Historical sessions",
|
||||
|
@ -1108,6 +1113,7 @@
|
|||
"Setting": "Setting",
|
||||
"SettingInEndpointHelpText": "Configure service address and port in system settings / component settings / server endpoints",
|
||||
"Settings": "System settings",
|
||||
"Share": "Share",
|
||||
"Show": "Display",
|
||||
"ShowAssetAllChildrenNode": "Show all sub-nodes assets",
|
||||
"ShowAssetOnlyCurrentNode": "Only show current node assets",
|
||||
|
@ -1304,6 +1310,7 @@
|
|||
"UploadCsvLth10MHelpText": "Only csv/xlsx can be uploaded, and no more than 10m",
|
||||
"UploadDir": "Upload path",
|
||||
"UploadFileLthHelpText": "Less than {limit}m supported",
|
||||
"UploadHelpText": "Please upload a .zip file containing the following sample directory structure",
|
||||
"UploadPlaybook": "Upload playbook",
|
||||
"UploadSucceed": "Upload succeeded",
|
||||
"UploadZipTips": "Please upload a file in zip format",
|
||||
|
@ -1389,13 +1396,8 @@
|
|||
"ZoneHelpMessage": "The zone is the location where assets are located, which can be a data center, public cloud, or VPC. Gateways can be set up within the region. When the network cannot be directly accessed, users can utilize gateways to login to the assets.",
|
||||
"ZoneList": "Zones",
|
||||
"ZoneUpdate": "Update the zone",
|
||||
"disallowSelfUpdateFields": "Not allowed to modify the current fields yourself",
|
||||
"forceEnableMFAHelpText": "If force enable, user can not disable by themselves",
|
||||
"removeWarningMsg": "Are you sure you want to remove",
|
||||
"RetrySelected": "Retry selected",
|
||||
"Running": "Running",
|
||||
"AdhocCreate": "Create the command",
|
||||
"UploadHelpText": "Please upload a .zip file containing the following sample directory structure",
|
||||
"SessionJoinRecords": "collaboration records",
|
||||
"ApprovalSelected": "Batch approval",
|
||||
"disallowSelfUpdateFields": "Not allowed to modify the current fields yourself"
|
||||
}
|
||||
"TaskPath": "Task path"
|
||||
}
|
|
@ -67,8 +67,9 @@
|
|||
"AddUserGroupToThisPermission": "ユーザーグループを追加",
|
||||
"AddUserToThisPermission": "ユーザーを追加する",
|
||||
"Address": "アドレス",
|
||||
"AdhocCreate": "アドホックコマンドを作成",
|
||||
"AdhocDetail": "コマンド詳細",
|
||||
"AdhocManage": "コマンド",
|
||||
"AdhocManage": "スクリプト管理",
|
||||
"AdhocUpdate": "コマンドを更新",
|
||||
"Advanced": "高度な設定",
|
||||
"AfterChange": "変更後",
|
||||
|
@ -116,6 +117,7 @@
|
|||
"ApprovaLevel": "承認情報",
|
||||
"ApprovalLevel": "承認レベル",
|
||||
"ApprovalProcess": "承認プロセス",
|
||||
"ApprovalSelected": "大量承認です",
|
||||
"Approved": "同意済み",
|
||||
"ApproverNumbers": "アプルーバの数",
|
||||
"ApsaraStack": "アリババクラウド専用クラウド",
|
||||
|
@ -331,7 +333,7 @@
|
|||
"CommunityEdition": "コミュニティ版",
|
||||
"Component": "コンポーネント",
|
||||
"ComponentMonitor": "コンポーネントの監視",
|
||||
"Components": "コンポーネント設定",
|
||||
"Components": "コンポーネントリスト",
|
||||
"ConceptContent": "あなたにはPythonインタープリタのように行動してほしい。Pythonのコードを提供しますので、それを実行してください。説明は一切不要です。コードの出力以外では何も反応しないでください。",
|
||||
"ConceptTitle": "🤔 Python インタープリター",
|
||||
"Config": "設定",
|
||||
|
@ -558,6 +560,7 @@
|
|||
"Gateway": "ゲートウェイ",
|
||||
"GatewayCreate": "ゲートウェイの作成",
|
||||
"GatewayList": "ゲートウェイリスト",
|
||||
"GatewayPlatformHelpText": "ゲートウェイプラットフォームは、Gatewayで始まるプラットフォームのみ選択可能です。",
|
||||
"GatewayUpdate": "ゲートウェイの更新",
|
||||
"GatherAccounts": "アカウント収集",
|
||||
"GatherAccountsHelpText": "資産上のアカウント情報を収集します。収集したアカウント情報は、システムにインポートして一元管理が可能です",
|
||||
|
@ -1029,6 +1032,7 @@
|
|||
"Resume": "回復",
|
||||
"ResumeTaskSendSuccessMsg": "リカバリータスクが発行されました、しばらくしてから更新してご確認ください",
|
||||
"Retry": "再試行",
|
||||
"RetrySelected": "選択したものを再試行",
|
||||
"Reviewer": "承認者",
|
||||
"Role": "役割",
|
||||
"RoleCreate": "ロール作成",
|
||||
|
@ -1055,6 +1059,7 @@
|
|||
"RunasHelpText": "実行スクリプトのユーザー名を入力してください",
|
||||
"RunasPolicy": "アカウント戦略",
|
||||
"RunasPolicyHelpText": "現在の資産にはこの実行ユーザーがいない場合、どのアカウント選択戦略を採用するか。スキップ:実行しない。特権アカウントを優先:特権アカウントがあれば最初に特権アカウントを選び、なければ一般アカウントを選ぶ。特権アカウントのみ:特権アカウントからのみ選択し、なければ実行しない",
|
||||
"Running": "実行中",
|
||||
"RunningPath": "実行パス",
|
||||
"RunningPathHelpText": "スクリプトの実行パスを記入してください、この設定はシェルスクリプトのみ有効です",
|
||||
"RunningTimes": "最近5回の実行時間",
|
||||
|
@ -1063,7 +1068,7 @@
|
|||
"SMSProvider": "メッセージサービスプロバイダ",
|
||||
"SMTP": "メールサーバ",
|
||||
"SPECIAL_CHAR_REQUIRED": "特別な文字を含む必要があります",
|
||||
"SSHKey": "SSH公開鍵",
|
||||
"SSHKey": "SSHキー",
|
||||
"SSHKeyOfProfileSSHUpdatePage": "下のボタンをクリックしてSSH公開鍵をリセットおよびダウンロードするか、あなたのSSH公開鍵をコピーして提出できます。",
|
||||
"SSHPort": "SSH ポート",
|
||||
"SSHSecretKey": "SSHキー",
|
||||
|
@ -1071,7 +1076,6 @@
|
|||
"SameAccount": "同名アカウント",
|
||||
"SameAccountTip": "権限を持つユーザーのユーザー名と同じアカウント",
|
||||
"SameTypeAccountTip": "同じユーザー名、鍵の種類のアカウントがすでに存在しています",
|
||||
"Share": "共有",
|
||||
"Saturday": "土曜日",
|
||||
"Save": "保存",
|
||||
"SaveAdhoc": "コマンドを保存する",
|
||||
|
@ -1122,6 +1126,7 @@
|
|||
"SessionData": "セッションデータ",
|
||||
"SessionDetail": "セッションの詳細",
|
||||
"SessionID": "セッションID",
|
||||
"SessionJoinRecords": "協力記録",
|
||||
"SessionList": "セッション記録",
|
||||
"SessionMonitor": "監視",
|
||||
"SessionOffline": "過去のセッション",
|
||||
|
@ -1143,6 +1148,7 @@
|
|||
"Setting": "設定",
|
||||
"SettingInEndpointHelpText": "システム設定/コンポーネント設定/サーバーエンドポイントでサービスのアドレスとポートを設定してください",
|
||||
"Settings": "システム設定",
|
||||
"Share": "共有",
|
||||
"Show": "表示",
|
||||
"ShowAssetAllChildrenNode": "すべての子ノードの資産を表示",
|
||||
"ShowAssetOnlyCurrentNode": "現在のノードアセットのみを表示",
|
||||
|
@ -1251,6 +1257,7 @@
|
|||
"TaskID": "タスク ID",
|
||||
"TaskList": "タスク一覧",
|
||||
"TaskMonitor": "タスクモニタリング",
|
||||
"TaskPath": "タスクパス",
|
||||
"TechnologyConsult": "技術相談",
|
||||
"TempPasswordTip": "一時的なパスワードの有効期間は300秒で、使用後すぐに無効になります",
|
||||
"TempToken": "一時的なパスワード",
|
||||
|
@ -1345,6 +1352,7 @@
|
|||
"UploadCsvLth10MHelpText": "アップロード可能なのは csv/xlsx のみで、10Mを超えないこと",
|
||||
"UploadDir": "アップロードディレクトリ",
|
||||
"UploadFileLthHelpText": "{limit}MB以下のファイルのみアップロード可能",
|
||||
"UploadHelpText": "次のサンプル構造ディレクトリを含む .zip ファイルをアップロードしてください。",
|
||||
"UploadPlaybook": "Playbookのアップロード",
|
||||
"UploadSucceed": "アップロード成功",
|
||||
"UploadZipTips": "zip形式のファイルをアップロードしてください",
|
||||
|
@ -1385,6 +1393,7 @@
|
|||
"Valid": "有効",
|
||||
"Variable": "変数",
|
||||
"VariableHelpText": "コマンド中で {{ key }} を使用して内蔵変数を読み取ることができます",
|
||||
"VaultHCPMountPoint": "Vault サーバのマウントポイント、デフォルトはjumpserver",
|
||||
"VaultHelpText": "1. セキュリティ上の理由により、設定ファイルで Vault ストレージをオンにする必要があります。<br>2. オンにした後、他の設定を入力してテストを行います。<br>3. データ同期を行います。同期は一方向です。ローカルデータベースからリモートの Vault にのみ同期します。同期が終了すればローカルデータベースはパスワードを保管していませんので、データのバックアップをお願いします。<br>4. Vault の設定を二度変更した後はサービスを再起動する必要があります。",
|
||||
"VerificationCodeSent": "認証コードが送信されました",
|
||||
"VerifySignTmpl": "認証コードのSMSテンプレート",
|
||||
|
@ -1429,11 +1438,7 @@
|
|||
"ZoneHelpMessage": "エリアとはアセットの位置で、データセンターやパブリッククラウド、あるいはVPCが該当します。エリアにはゲートウェイを設定でき、ネットワークが直接接続できない場合、ゲートウェイを経由してアセットにログインすることができます",
|
||||
"ZoneList": "地域リスト",
|
||||
"ZoneUpdate": "更新エリア",
|
||||
"disallowSelfUpdateFields": "現在のフィールドを自分で変更することは許可されていません",
|
||||
"forceEnableMFAHelpText": "強制的に有効化すると、ユーザーは自分で無効化することができません。",
|
||||
"removeWarningMsg": "削除してもよろしいですか",
|
||||
"AdhocCreate": "アドホックコマンドを作成",
|
||||
"UploadHelpText": "次のサンプル構造ディレクトリを含む .zip ファイルをアップロードしてください。",
|
||||
"SessionJoinRecords": "協力記録",
|
||||
"ApprovalSelected": "大量承認です",
|
||||
"disallowSelfUpdateFields": "現在のフィールドを自分で変更することは許可されていません"
|
||||
}
|
||||
"removeWarningMsg": "削除してもよろしいですか"
|
||||
}
|
|
@ -67,8 +67,9 @@
|
|||
"AddUserGroupToThisPermission": "添加用户组",
|
||||
"AddUserToThisPermission": "添加用户",
|
||||
"Address": "地址",
|
||||
"AdhocCreate": "创建命令",
|
||||
"AdhocDetail": "命令详情",
|
||||
"AdhocManage": "命令管理",
|
||||
"AdhocManage": "脚本管理",
|
||||
"AdhocUpdate": "更新命令",
|
||||
"Advanced": "高级设置",
|
||||
"AfterChange": "变更后",
|
||||
|
@ -116,6 +117,7 @@
|
|||
"ApprovaLevel": "审批信息",
|
||||
"ApprovalLevel": "审批级别",
|
||||
"ApprovalProcess": "审批流程",
|
||||
"ApprovalSelected": "批量审批",
|
||||
"Approved": "已同意",
|
||||
"ApproverNumbers": "审批人数量",
|
||||
"ApsaraStack": "阿里云专有云",
|
||||
|
@ -316,7 +318,7 @@
|
|||
"CommunityEdition": "社区版",
|
||||
"Component": "组件",
|
||||
"ComponentMonitor": "组件监控",
|
||||
"Components": "组件设置",
|
||||
"Components": "组件列表",
|
||||
"ConceptContent": "我想让你像一个 Python 解释器一样行事。我将给你 Python 代码,你将执行它。不要提供任何解释。除了代码的输出,不要用任何东西来回应。",
|
||||
"ConceptTitle": "🤔 Python 解释器 ",
|
||||
"Config": "配置",
|
||||
|
@ -543,6 +545,7 @@
|
|||
"Gateway": "网关",
|
||||
"GatewayCreate": "创建网关",
|
||||
"GatewayList": "网关列表",
|
||||
"GatewayPlatformHelpText": "网关平台只能选择以 Gateway 开头的平台",
|
||||
"GatewayUpdate": "更新网关",
|
||||
"GatherAccounts": "账号收集",
|
||||
"GatherAccountsHelpText": "收集资产上的账号信息。收集后的账号信息可以导入到系统中,方便统一管理",
|
||||
|
@ -998,6 +1001,7 @@
|
|||
"Resume": "恢复",
|
||||
"ResumeTaskSendSuccessMsg": "恢复任务已下发,请稍后刷新查看",
|
||||
"Retry": "重试",
|
||||
"RetrySelected": "重试所选",
|
||||
"Reviewer": "审批人",
|
||||
"Role": "角色",
|
||||
"RoleCreate": "创建角色",
|
||||
|
@ -1024,6 +1028,7 @@
|
|||
"RunasHelpText": "填写运行脚本的用户名",
|
||||
"RunasPolicy": "账号策略",
|
||||
"RunasPolicyHelpText": "当前资产上没此运行用户时,采取什么账号选择策略。跳过:不执行。优先特权账号:如果有特权账号先选特权账号,如果没有就选普通账号。仅特权账号:只从特权账号中选择,如果没有则不执行",
|
||||
"Running": "运行中",
|
||||
"RunningPath": "运行路径",
|
||||
"RunningPathHelpText": "填写脚本的运行路径,此设置仅 shell 脚本生效",
|
||||
"RunningTimes": "最近5次运行时间",
|
||||
|
@ -1032,7 +1037,7 @@
|
|||
"SMSProvider": "短信服务商",
|
||||
"SMTP": "邮件服务器",
|
||||
"SPECIAL_CHAR_REQUIRED": "必须包含特殊字符",
|
||||
"SSHKey": "SSH公钥",
|
||||
"SSHKey": "SSH密钥",
|
||||
"SSHKeyOfProfileSSHUpdatePage": "你可以点击下面的按钮重置并下载密钥,或者复制你的 SSH 公钥并提交。",
|
||||
"SSHPort": "SSH 端口",
|
||||
"SSHSecretKey": "SSH 密钥",
|
||||
|
@ -1040,7 +1045,6 @@
|
|||
"SameAccount": "同名账号",
|
||||
"SameAccountTip": "与被授权人用户名相同的账号",
|
||||
"SameTypeAccountTip": "相同用户名、密钥类型的账号已存在",
|
||||
"Share": "分享",
|
||||
"Saturday": "周六",
|
||||
"Save": "保存",
|
||||
"SaveAdhoc": "保存命令",
|
||||
|
@ -1090,6 +1094,7 @@
|
|||
"SessionData": "会话数据",
|
||||
"SessionDetail": "会话详情",
|
||||
"SessionID": "会话ID",
|
||||
"SessionJoinRecords": "协作记录",
|
||||
"SessionList": "会话记录",
|
||||
"SessionMonitor": "监控",
|
||||
"SessionOffline": "历史会话",
|
||||
|
@ -1111,6 +1116,7 @@
|
|||
"Setting": "设置",
|
||||
"SettingInEndpointHelpText": "在 系统设置 / 组件设置 / 服务端点 中配置服务地址和端口",
|
||||
"Settings": "系统设置",
|
||||
"Share": "分享",
|
||||
"Show": "显示",
|
||||
"ShowAssetAllChildrenNode": "显示所有子节点资产",
|
||||
"ShowAssetOnlyCurrentNode": "仅显示当前节点资产",
|
||||
|
@ -1307,6 +1313,7 @@
|
|||
"UploadCsvLth10MHelpText": "只能上传 csv/xlsx, 且不超过 10M",
|
||||
"UploadDir": "上传目录",
|
||||
"UploadFileLthHelpText": "只能上传小于{limit}MB文件",
|
||||
"UploadHelpText": "请上传包含以下示例结构目录的 .zip 压缩文件",
|
||||
"UploadPlaybook": "上传 Playbook",
|
||||
"UploadSucceed": "上传成功",
|
||||
"UploadZipTips": "请上传 zip 格式的文件",
|
||||
|
@ -1347,6 +1354,7 @@
|
|||
"Valid": "有效",
|
||||
"Variable": "变量",
|
||||
"VariableHelpText": "您可以在命令中使用 {{ key }} 读取内置变量",
|
||||
"VaultHCPMountPoint": "Vault 服务器的挂载点,默认为 jumpserver",
|
||||
"VaultHelpText": "1. 由于安全原因,需要配置文件中开启 Vault 存储。<br>2. 开启后,填写其他配置,进行测试。<br>3. 进行数据同步,同步是单向的,只会从本地数据库同步到远端 Vault,同步完成本地数据库不再存储密码,请备份好数据。<br>4. 二次修改 Vault 配置后需重启服务。",
|
||||
"VerificationCodeSent": "验证码已发送",
|
||||
"VerifySignTmpl": "验证码短信模板",
|
||||
|
@ -1391,14 +1399,8 @@
|
|||
"ZoneHelpMessage": "网域是资产所在的位置,可以是机房,公有云 或者 VPC。网域中可以设置网关,当网络不能直达的时候,可以使用网关跳转登录到资产",
|
||||
"ZoneList": "网域列表",
|
||||
"ZoneUpdate": "更新网域",
|
||||
"disallowSelfUpdateFields": "不允许自己修改当前字段",
|
||||
"forceEnableMFAHelpText": "如果强制启用,用户无法自行禁用",
|
||||
"removeWarningMsg": "你确定要移除",
|
||||
"VaultHCPMountPoint": "Vault 服务器的挂载点,默认为 jumpserver",
|
||||
"RetrySelected": "重试所选",
|
||||
"Running": "运行中",
|
||||
"AdhocCreate": "创建命令",
|
||||
"UploadHelpText": "请上传包含以下示例结构目录的 .zip 压缩文件",
|
||||
"SessionJoinRecords": "协作记录",
|
||||
"ApprovalSelected": "批量审批",
|
||||
"disallowSelfUpdateFields": "不允许自己修改当前字段"
|
||||
}
|
||||
"TaskPath": "任务路径"
|
||||
}
|
|
@ -85,8 +85,9 @@
|
|||
"AddUserToThisPermission": "新增使用者",
|
||||
"Address": "地址",
|
||||
"Addressee": "收件人",
|
||||
"AdhocCreate": "創建命令",
|
||||
"AdhocDetail": "命令詳情",
|
||||
"AdhocManage": "命令管理",
|
||||
"AdhocManage": "腳本管理",
|
||||
"AdhocUpdate": "更新命令",
|
||||
"Admin": "管理員",
|
||||
"AdminUser": "特權用戶",
|
||||
|
@ -155,6 +156,7 @@
|
|||
"ApprovaLevel": "審批資訊",
|
||||
"ApprovalLevel": "審批級別",
|
||||
"ApprovalProcess": "審批流程",
|
||||
"ApprovalSelected": "批次審批",
|
||||
"Approved": "已同意",
|
||||
"ApproverNumbers": "審批人數量",
|
||||
"ApsaraStack": "阿里雲專有雲",
|
||||
|
@ -424,7 +426,7 @@
|
|||
"CommunityEdition": "社區版",
|
||||
"Component": "組件",
|
||||
"ComponentMonitor": "組件監控",
|
||||
"Components": "組件設置",
|
||||
"Components": "組件列表",
|
||||
"ConceptContent": "我想讓你像一個 Python 解釋器一樣行事。我將給你 Python 代碼,你將執行它。不要提供任何解釋。除了代碼的輸出,不要用任何東西來回應。",
|
||||
"ConceptTitle": "🤔 Python 解釋器 ",
|
||||
"Config": "配置",
|
||||
|
@ -713,6 +715,7 @@
|
|||
"Gateway": "網關",
|
||||
"GatewayCreate": "創建網關",
|
||||
"GatewayList": "網關列表",
|
||||
"GatewayPlatformHelpText": "網關平台只能選擇以 Gateway 開頭的平台",
|
||||
"GatewayProtocolHelpText": "SSH網關,支持代理SSH,RDP和VNC",
|
||||
"GatewayUpdate": "更新網關",
|
||||
"GatherAccounts": "帳號收集",
|
||||
|
@ -767,8 +770,8 @@
|
|||
"IPLoginLimit": "IP 登入限制",
|
||||
"IPMatch": "IP 匹配",
|
||||
"IPNetworkSegment": "IP網段",
|
||||
"Icon": "圖示",
|
||||
"IPType": "IP 類型",
|
||||
"Icon": "圖示",
|
||||
"Id": "ID",
|
||||
"IdeaContent": "我想讓你充當一個 Linux 終端。我將輸入命令,你將回答終端應該顯示的內容。我希望你只在一個獨特的代碼塊內回復終端輸出,而不是其他。不要寫解釋。當我需要告訴你一些事情時,我會把文字放在大括號裡{備註文本}。",
|
||||
"IdeaTitle": "🌱 Linux 終端",
|
||||
|
@ -1329,6 +1332,7 @@
|
|||
"Resume": "恢復",
|
||||
"ResumeTaskSendSuccessMsg": "恢復任務已下發,請稍後刷新查看",
|
||||
"Retry": "重試",
|
||||
"RetrySelected": "重新嘗試所選",
|
||||
"Reviewer": "審批人",
|
||||
"Revise": "修改",
|
||||
"Role": "角色",
|
||||
|
@ -1359,6 +1363,7 @@
|
|||
"RunasHelpText": "填寫運行腳本的使用者名稱",
|
||||
"RunasPolicy": "帳號策略",
|
||||
"RunasPolicyHelpText": "當前資產上沒此運行用戶時,採取什麼帳號選擇策略。跳過:不執行。優先特權帳號:如果有特權帳號先選特權帳號,如果沒有就選普通帳號。僅特權帳號:只從特權帳號中選擇,如果沒有則不執行",
|
||||
"Running": "正在運行中的Vault 伺服器掛載點,預設為 jumpserver",
|
||||
"RunningPath": "運行路徑",
|
||||
"RunningPathHelpText": "填寫腳本的運行路徑,此設置僅 shell 腳本生效",
|
||||
"RunningTimes": " Last 5 run times",
|
||||
|
@ -1369,7 +1374,7 @@
|
|||
"SMSProvider": "簡訊服務商",
|
||||
"SMTP": "郵件伺服器",
|
||||
"SPECIAL_CHAR_REQUIRED": "須包含特殊字元",
|
||||
"SSHKey": "SSH公鑰",
|
||||
"SSHKey": "SSH金鑰",
|
||||
"SSHKeyOfProfileSSHUpdatePage": "複製你的公鑰到這裡",
|
||||
"SSHKeySetting": "SSH公鑰設置",
|
||||
"SSHPort": "SSH 埠",
|
||||
|
@ -1380,7 +1385,6 @@
|
|||
"SameAccount": "同名帳號",
|
||||
"SameAccountTip": "與被授權人使用者名稱相同的帳號",
|
||||
"SameTypeAccountTip": "相同使用者名稱、金鑰類型的帳號已存在",
|
||||
"Share": "分享",
|
||||
"Saturday": "週六",
|
||||
"Save": "保存",
|
||||
"SaveAdhoc": "保存命令",
|
||||
|
@ -1441,6 +1445,7 @@
|
|||
"SessionData": "會話數據",
|
||||
"SessionDetail": "會話詳情",
|
||||
"SessionID": "會話ID",
|
||||
"SessionJoinRecords": "協作記錄",
|
||||
"SessionList": "會話記錄",
|
||||
"SessionMonitor": "監控",
|
||||
"SessionOffline": "歷史會話",
|
||||
|
@ -1465,6 +1470,7 @@
|
|||
"Setting": "設置",
|
||||
"SettingInEndpointHelpText": "在 系統設置 / 組件設置 / 服務端點 中配置服務地址和埠",
|
||||
"Settings": "系統設置",
|
||||
"Share": "分享",
|
||||
"Show": "顯示",
|
||||
"ShowAssetAllChildrenNode": "顯示所有子節點資產",
|
||||
"ShowAssetOnlyCurrentNode": "僅顯示當前節點資產",
|
||||
|
@ -1596,6 +1602,7 @@
|
|||
"TaskID": "任務 ID",
|
||||
"TaskList": "工作列表",
|
||||
"TaskMonitor": "任務監控",
|
||||
"TaskPath": "任務路徑",
|
||||
"TechnologyConsult": "技術諮詢",
|
||||
"TempPassword": "臨時密碼有效期為 300 秒,使用後立刻失效",
|
||||
"TempPasswordTip": "臨時密碼有效時間為 300 秒,使用後立即失效",
|
||||
|
@ -1718,6 +1725,7 @@
|
|||
"UploadDir": "上傳目錄",
|
||||
"UploadFailed": "上傳失敗",
|
||||
"UploadFileLthHelpText": "只能上傳小於{limit}MB檔案",
|
||||
"UploadHelpText": "請上傳包含以下範例結構目錄的 .zip 壓縮文件",
|
||||
"UploadPlaybook": "上傳 Playbook",
|
||||
"UploadSucceed": "上傳成功",
|
||||
"UploadZipTips": "請上傳 zip 格式的文件",
|
||||
|
@ -1782,6 +1790,7 @@
|
|||
"Variable": "變數",
|
||||
"VariableHelpText": "您可以在命令中使用 {{ key }} 讀取內建變數",
|
||||
"Vault": "密碼匣子",
|
||||
"VaultHCPMountPoint": "重新嘗試所選",
|
||||
"VaultHelpText": "1. 由於安全原因,需要配置文件中開啟 Vault 儲存。<br>2. 開啟後,填寫其他配置,進行測試。<br>3. 進行數據同步,同步是單向的,只會從本地資料庫同步到遠端 Vault,同步完成本地資料庫不再儲存密碼,請備份好數據。<br>4. 二次修改 Vault 配置後需重啟服務。",
|
||||
"Vendor": "製造商",
|
||||
"VerificationCodeSent": "驗證碼已發送",
|
||||
|
@ -1914,7 +1923,6 @@
|
|||
"consult": "諮詢",
|
||||
"containerName": "容器名稱",
|
||||
"contents": "內容",
|
||||
"AdhocCreate": "創建命令",
|
||||
"createBy": "創建者",
|
||||
"createErrorMsg": "創建失敗",
|
||||
"createSuccessMsg": "導入創建成功,總共:{count}",
|
||||
|
@ -2273,8 +2281,5 @@
|
|||
"weComTest": "測試",
|
||||
"week": "周",
|
||||
"weekOf": "周的星期",
|
||||
"wildcardsAllowed": "允許的通配符",
|
||||
"UploadHelpText": "請上傳包含以下範例結構目錄的 .zip 壓縮文件",
|
||||
"SessionJoinRecords": "協作記錄",
|
||||
"ApprovalSelected": "批次審批"
|
||||
}
|
||||
"wildcardsAllowed": "允許的通配符"
|
||||
}
|
|
@ -91,7 +91,7 @@
|
|||
"Info": "Info",
|
||||
"InstallClientMsg": "JumpServer client not found, Go to download and install?",
|
||||
"Japanese keyboard layout": "Japanese (Qwerty)",
|
||||
"Keyboard keys": "Option + Left / Option + Right",
|
||||
"Keyboard keys": "Option + Shift + Left / Right",
|
||||
"Keyboard layout": "Keyboard layout",
|
||||
"Keyboard switch session": "Switch session → Shortcut keys",
|
||||
"Kubernetes": "Kubernetes",
|
||||
|
@ -123,6 +123,7 @@
|
|||
"NoTabs": "No tabs",
|
||||
"Not quick command": "Not quick command",
|
||||
"Open in new window": "Open in new window",
|
||||
"Operator": "Operator",
|
||||
"Password": "Password",
|
||||
"Password is token password on the table": "Password is token password on the table",
|
||||
"Password is your password login to system": "Password is your password login to system",
|
||||
|
@ -200,6 +201,7 @@
|
|||
"Users": "",
|
||||
"Using token": "Using token",
|
||||
"View": "View",
|
||||
"Viewer": "Viewer",
|
||||
"VirtualApp": "Virtual App",
|
||||
"Web Terminal": "Web Terminal",
|
||||
"Website": "Website",
|
||||
|
@ -209,16 +211,14 @@
|
|||
"asset": "asset",
|
||||
"cols": "cols",
|
||||
"confirm": "confirm",
|
||||
"connect info": "connect info",
|
||||
"connectDisabledTipsMethodDisabled": "Tips: No valid remote application deployment machine found, current resource cannot be connected. Please contact the administrator for assistance",
|
||||
"connectDisabledTipsNoAccount": "Tips: No valid authorization account found, current resource cannot be connected. Please contact the administrator for assistance",
|
||||
"connectDisabledTipsNoConnectMethod": "Tips: No valid connection method found, current resource cannot be connected. Please contact the administrator for assistance",
|
||||
"connectDisabledTipsMethodDisabled": "Tips: No valid remote application deployment machine found, current resource cannot be connected. Please contact the administrator for assistance",
|
||||
"connect info": "connect info",
|
||||
"download": "download",
|
||||
"rows": "rows",
|
||||
"start time": "start time",
|
||||
"success": "success",
|
||||
"system user": "system user",
|
||||
"user": "user",
|
||||
"Viewer": "Viewer",
|
||||
"Operator": "Operator"
|
||||
}
|
||||
"user": "user"
|
||||
}
|
|
@ -90,7 +90,7 @@
|
|||
"Info": "ヒント",
|
||||
"InstallClientMsg": "JumpServerクライアントがインストールされていない、今ダウンロードしてインストールしますか?",
|
||||
"Japanese keyboard layout": "Japanese (Qwerty)",
|
||||
"Keyboard keys": "Option + Left / Option + Right",
|
||||
"Keyboard keys": "Option + Shift + Left / Right",
|
||||
"Keyboard layout": "キーボードレイアウト",
|
||||
"Keyboard switch session": "セッションの切り替え → ショートカットキー",
|
||||
"Kubernetes": "Kubernetes",
|
||||
|
@ -124,6 +124,7 @@
|
|||
"Normal accounts": "通常のログインアカウント",
|
||||
"Not quick command": "非高速コマンド",
|
||||
"Open in new window": "新しいウィンドウが開きます",
|
||||
"Operator": "オペレーター",
|
||||
"Password": "パスワード",
|
||||
"Password is token password on the table": "パスワードは、テーブルのトークンパスワードです",
|
||||
"Password is your password login to system": "パスワードは、システムにログインするためのパスワードです",
|
||||
|
@ -205,6 +206,7 @@
|
|||
"Users": "ユーザー",
|
||||
"Using token": "トークンを使用する",
|
||||
"View": "ビュー",
|
||||
"Viewer": "ビューア",
|
||||
"VirtualApp": "仮想アプリ",
|
||||
"Web Terminal": "Web端末",
|
||||
"Website": "公式サイト",
|
||||
|
@ -222,7 +224,5 @@
|
|||
"start time": "開始時間",
|
||||
"success": "成功",
|
||||
"system user": "システムユーザー",
|
||||
"user": "ユーザー",
|
||||
"Viewer": "ビューア",
|
||||
"Operator": "オペレーター"
|
||||
}
|
||||
"user": "ユーザー"
|
||||
}
|
|
@ -89,7 +89,7 @@
|
|||
"Info": "提示",
|
||||
"InstallClientMsg": "JumpServer 客户端没有安装,现在去下载安装?",
|
||||
"Japanese keyboard layout": "Japanese (Qwerty)",
|
||||
"Keyboard keys": "Option + Left / Option + Right",
|
||||
"Keyboard keys": "Option + Shift + Left / Right",
|
||||
"Keyboard layout": "键盘布局",
|
||||
"Keyboard switch session": "切换会话 → 快捷键",
|
||||
"Kubernetes": "Kubernetes",
|
||||
|
@ -122,6 +122,7 @@
|
|||
"NoTabs": "没有窗口",
|
||||
"Not quick command": "暂无快捷命令",
|
||||
"Open in new window": "新窗口打开",
|
||||
"Operator": "操作人",
|
||||
"Password": "密码",
|
||||
"Password is token password on the table": "密码是表格中的 Token 密码",
|
||||
"Password is your password login to system": "密码是你登录系统的密码",
|
||||
|
@ -199,6 +200,7 @@
|
|||
"Users": "用户",
|
||||
"Using token": "使用 Token",
|
||||
"View": "视图",
|
||||
"Viewer": "查看人",
|
||||
"VirtualApp": "虚拟应用",
|
||||
"Web Terminal": "Web终端",
|
||||
"Website": "官网",
|
||||
|
@ -207,16 +209,14 @@
|
|||
"asset": "资产",
|
||||
"cols": "列数",
|
||||
"confirm": "确认",
|
||||
"connect info": "连接信息",
|
||||
"connectDisabledTipsMethodDisabled": "提示:未找到有效的远程应用发布机,当前资源无法连接,请联系管理员进行处理",
|
||||
"connectDisabledTipsNoAccount": "提示:未找到有效的授权账号,当前资源无法连接,请联系管理员进行处理",
|
||||
"connectDisabledTipsNoConnectMethod": "提示:未找到有效的连接方式,当前资源无法连接,请联系管理员进行处理",
|
||||
"connectDisabledTipsMethodDisabled": "提示:未找到有效的远程应用发布机,当前资源无法连接,请联系管理员进行处理",
|
||||
"connect info": "连接信息",
|
||||
"download": "下载",
|
||||
"rows": "行数",
|
||||
"start time": "开始时间",
|
||||
"success": "成功",
|
||||
"system user": "系统用户",
|
||||
"user": "用户",
|
||||
"Viewer": "查看人",
|
||||
"Operator": "操作人"
|
||||
}
|
||||
"user": "用户"
|
||||
}
|
|
@ -90,7 +90,7 @@
|
|||
"Info": "提示",
|
||||
"InstallClientMsg": "JumpServer 用戶端沒有安裝,現在去下載安裝?",
|
||||
"Japanese keyboard layout": "Japanese (Qwerty)",
|
||||
"Keyboard keys": "Option + Left / Option + Right",
|
||||
"Keyboard keys": "Option + Shift + Left / Right",
|
||||
"Keyboard layout": "鍵盤布局",
|
||||
"Keyboard switch session": "切換會話 → 快捷鍵",
|
||||
"Kubernetes": "Kubernetes",
|
||||
|
@ -123,6 +123,7 @@
|
|||
"NoTabs": "沒有視窗",
|
||||
"Not quick command": "暫無快捷命令",
|
||||
"Open in new window": "新窗口打開",
|
||||
"Operator": "操作人",
|
||||
"Password": "密碼",
|
||||
"Password is token password on the table": "密碼是表格中的 Token 密碼",
|
||||
"Password is your password login to system": "密碼是你登入系統的密碼",
|
||||
|
@ -203,6 +204,7 @@
|
|||
"Users": "用戶",
|
||||
"Using token": "使用 Token",
|
||||
"View": "視圖",
|
||||
"Viewer": "查看人",
|
||||
"VirtualApp": "虛擬應用",
|
||||
"Web Terminal": "Web終端",
|
||||
"Website": "官網",
|
||||
|
@ -220,7 +222,5 @@
|
|||
"start time": "開始時間",
|
||||
"success": "成功",
|
||||
"system user": "系統用戶",
|
||||
"user": "用戶",
|
||||
"Viewer": "查看人",
|
||||
"Operator": "操作人"
|
||||
}
|
||||
"user": "用戶"
|
||||
}
|
|
@@ -288,6 +288,26 @@ class Config(dict):
        'AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS': False,
        'AUTH_LDAP_OPTIONS_OPT_REFERRALS': -1,

        # Auth LDAP HA settings
        'AUTH_LDAP_HA': False,
        'AUTH_LDAP_HA_SERVER_URI': 'ldap://localhost:389',
        'AUTH_LDAP_HA_BIND_DN': 'cn=admin,dc=jumpserver,dc=org',
        'AUTH_LDAP_HA_BIND_PASSWORD': '',
        'AUTH_LDAP_HA_SEARCH_OU': 'ou=tech,dc=jumpserver,dc=org',
        'AUTH_LDAP_HA_SEARCH_FILTER': '(cn=%(user)s)',
        'AUTH_LDAP_HA_START_TLS': False,
        'AUTH_LDAP_HA_USER_ATTR_MAP': {"username": "cn", "name": "sn", "email": "mail"},
        'AUTH_LDAP_HA_CONNECT_TIMEOUT': 10,
        'AUTH_LDAP_HA_CACHE_TIMEOUT': 3600 * 24 * 30,
        'AUTH_LDAP_HA_SEARCH_PAGED_SIZE': 1000,
        'AUTH_LDAP_HA_SYNC_IS_PERIODIC': False,
        'AUTH_LDAP_HA_SYNC_INTERVAL': None,
        'AUTH_LDAP_HA_SYNC_CRONTAB': None,
        'AUTH_LDAP_HA_SYNC_ORG_IDS': [DEFAULT_ID],
        'AUTH_LDAP_HA_SYNC_RECEIVERS': [],
        'AUTH_LDAP_HA_USER_LOGIN_ONLY_IN_USERS': False,
        'AUTH_LDAP_HA_OPTIONS_OPT_REFERRALS': -1,

        # OpenID configuration options
        # OpenID common options (version <= 1.5.8 or version >= 1.5.8)
        'AUTH_OPENID': False,
@@ -0,0 +1,20 @@
import os
from logging.handlers import TimedRotatingFileHandler
from datetime import datetime, timedelta


class DailyTimedRotatingFileHandler(TimedRotatingFileHandler):
    def rotator(self, source, dest):
        """ Override the original method to rotate the log file daily."""
        dest = self._get_rotate_dest_filename(source)
        if os.path.exists(source) and not os.path.exists(dest):
            # When multiple service processes exist, make sure only one of them rotates successfully
            os.rename(source, dest)

    @staticmethod
    def _get_rotate_dest_filename(source):
        date_yesterday = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')
        path = [os.path.dirname(source), date_yesterday, os.path.basename(source)]
        filename = os.path.join(*path)
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        return filename
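The handler above moves the finished log into a per-date subdirectory (yesterday's date) instead of appending a suffix as the stock TimedRotatingFileHandler does, and the existence check keeps concurrent service processes from racing on the rename. A small standalone usage sketch, with hypothetical file names (only the class path is taken from this commit's LOGGING changes below):

import logging
from jumpserver.rewriting.logging import DailyTimedRotatingFileHandler

logger = logging.getLogger('demo')
# Rotate at midnight; rotated output lands in <dir>/<YYYY-MM-DD>/demo.log
handler = DailyTimedRotatingFileHandler('logs/demo.log', when='midnight', encoding='utf8')
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
logger.info('hello')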
@@ -53,6 +53,44 @@ AUTH_LDAP_SYNC_ORG_IDS = CONFIG.AUTH_LDAP_SYNC_ORG_IDS
AUTH_LDAP_SYNC_RECEIVERS = CONFIG.AUTH_LDAP_SYNC_RECEIVERS
AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS = CONFIG.AUTH_LDAP_USER_LOGIN_ONLY_IN_USERS

# Auth LDAP HA settings
AUTH_LDAP_HA = CONFIG.AUTH_LDAP_HA
AUTH_LDAP_HA_SERVER_URI = CONFIG.AUTH_LDAP_HA_SERVER_URI
AUTH_LDAP_HA_BIND_DN = CONFIG.AUTH_LDAP_HA_BIND_DN
AUTH_LDAP_HA_BIND_PASSWORD = CONFIG.AUTH_LDAP_HA_BIND_PASSWORD
AUTH_LDAP_HA_SEARCH_OU = CONFIG.AUTH_LDAP_HA_SEARCH_OU
AUTH_LDAP_HA_SEARCH_FILTER = CONFIG.AUTH_LDAP_HA_SEARCH_FILTER
AUTH_LDAP_HA_START_TLS = CONFIG.AUTH_LDAP_HA_START_TLS
AUTH_LDAP_HA_USER_ATTR_MAP = CONFIG.AUTH_LDAP_HA_USER_ATTR_MAP
AUTH_LDAP_HA_USER_QUERY_FIELD = 'username'
AUTH_LDAP_HA_GLOBAL_OPTIONS = {
    ldap.OPT_X_TLS_REQUIRE_CERT: ldap.OPT_X_TLS_NEVER,
    ldap.OPT_REFERRALS: CONFIG.AUTH_LDAP_HA_OPTIONS_OPT_REFERRALS
}
LDAP_HA_CACERT_FILE = os.path.join(PROJECT_DIR, "data", "certs", "ldap_ha_ca.pem")
if os.path.isfile(LDAP_HA_CACERT_FILE):
    AUTH_LDAP_HA_GLOBAL_OPTIONS[ldap.OPT_X_TLS_CACERTFILE] = LDAP_HA_CACERT_FILE
LDAP_HA_CERT_FILE = os.path.join(PROJECT_DIR, "data", "certs", "ldap_ha_cert.pem")
if os.path.isfile(LDAP_HA_CERT_FILE):
    AUTH_LDAP_HA_GLOBAL_OPTIONS[ldap.OPT_X_TLS_CERTFILE] = LDAP_HA_CERT_FILE
LDAP_HA_KEY_FILE = os.path.join(PROJECT_DIR, "data", "certs", "ldap_ha_cert.key")
if os.path.isfile(LDAP_HA_KEY_FILE):
    AUTH_LDAP_HA_GLOBAL_OPTIONS[ldap.OPT_X_TLS_KEYFILE] = LDAP_HA_KEY_FILE
AUTH_LDAP_HA_CONNECTION_OPTIONS = {
    ldap.OPT_TIMEOUT: CONFIG.AUTH_LDAP_HA_CONNECT_TIMEOUT,
    ldap.OPT_NETWORK_TIMEOUT: CONFIG.AUTH_LDAP_HA_CONNECT_TIMEOUT
}
AUTH_LDAP_HA_CACHE_TIMEOUT = CONFIG.AUTH_LDAP_HA_CACHE_TIMEOUT
AUTH_LDAP_HA_ALWAYS_UPDATE_USER = True

AUTH_LDAP_HA_SEARCH_PAGED_SIZE = CONFIG.AUTH_LDAP_HA_SEARCH_PAGED_SIZE
AUTH_LDAP_HA_SYNC_IS_PERIODIC = CONFIG.AUTH_LDAP_HA_SYNC_IS_PERIODIC
AUTH_LDAP_HA_SYNC_INTERVAL = CONFIG.AUTH_LDAP_HA_SYNC_INTERVAL
AUTH_LDAP_HA_SYNC_CRONTAB = CONFIG.AUTH_LDAP_HA_SYNC_CRONTAB
AUTH_LDAP_HA_SYNC_ORG_IDS = CONFIG.AUTH_LDAP_HA_SYNC_ORG_IDS
AUTH_LDAP_HA_SYNC_RECEIVERS = CONFIG.AUTH_LDAP_HA_SYNC_RECEIVERS
AUTH_LDAP_HA_USER_LOGIN_ONLY_IN_USERS = CONFIG.AUTH_LDAP_HA_USER_LOGIN_ONLY_IN_USERS

# ==============================================================================
# OpenID authentication settings
# Reference: https://django-oidc-rp.readthedocs.io/en/stable/settings.html

@@ -212,6 +250,7 @@ RBAC_BACKEND = 'rbac.backends.RBACBackend'
AUTH_BACKEND_MODEL = 'authentication.backends.base.JMSModelBackend'
AUTH_BACKEND_PUBKEY = 'authentication.backends.pubkey.PublicKeyAuthBackend'
AUTH_BACKEND_LDAP = 'authentication.backends.ldap.LDAPAuthorizationBackend'
AUTH_BACKEND_LDAP_HA = 'authentication.backends.ldap.LDAPHAAuthorizationBackend'
AUTH_BACKEND_OIDC_PASSWORD = 'authentication.backends.oidc.OIDCAuthPasswordBackend'
AUTH_BACKEND_OIDC_CODE = 'authentication.backends.oidc.OIDCAuthCodeBackend'
AUTH_BACKEND_RADIUS = 'authentication.backends.radius.RadiusBackend'

@@ -232,7 +271,7 @@ AUTHENTICATION_BACKENDS = [
    # Permission checks only
    RBAC_BACKEND,
    # Password-based backends
    AUTH_BACKEND_MODEL, AUTH_BACKEND_PUBKEY, AUTH_BACKEND_LDAP, AUTH_BACKEND_RADIUS,
    AUTH_BACKEND_MODEL, AUTH_BACKEND_PUBKEY, AUTH_BACKEND_LDAP, AUTH_BACKEND_LDAP_HA, AUTH_BACKEND_RADIUS,
    # Redirect-based backends
    AUTH_BACKEND_CAS, AUTH_BACKEND_OIDC_PASSWORD, AUTH_BACKEND_OIDC_CODE, AUTH_BACKEND_SAML2,
    AUTH_BACKEND_OAUTH2,
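With the settings above in place, the HA LDAP source is driven entirely by the new AUTH_LDAP_HA_* config keys. A hypothetical config.yml fragment that would enable it (key names mirror the defaults added in this commit; the values are placeholders, not taken from the source):

AUTH_LDAP_HA: true
AUTH_LDAP_HA_SERVER_URI: ldap://ldap-standby.example.org:389
AUTH_LDAP_HA_BIND_DN: cn=admin,dc=jumpserver,dc=org
AUTH_LDAP_HA_BIND_PASSWORD: change-me
AUTH_LDAP_HA_SEARCH_OU: ou=tech,dc=jumpserver,dc=org
AUTH_LDAP_HA_SEARCH_FILTER: (cn=%(user)s)
AUTH_LDAP_HA_USER_ATTR_MAP: {"username": "cn", "name": "sn", "email": "mail"}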
@@ -50,37 +50,33 @@ LOGGING = {
        'file': {
            'encoding': 'utf8',
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'maxBytes': 1024 * 1024 * 100,
            'backupCount': 7,
            'class': 'jumpserver.rewriting.logging.DailyTimedRotatingFileHandler',
            'when': 'midnight',
            'formatter': 'main',
            'filename': JUMPSERVER_LOG_FILE,
        },
        'ansible_logs': {
            'encoding': 'utf8',
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'class': 'jumpserver.rewriting.logging.DailyTimedRotatingFileHandler',
            'when': 'midnight',
            'formatter': 'main',
            'maxBytes': 1024 * 1024 * 100,
            'backupCount': 7,
            'filename': ANSIBLE_LOG_FILE,
        },
        'drf_exception': {
            'encoding': 'utf8',
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'class': 'jumpserver.rewriting.logging.DailyTimedRotatingFileHandler',
            'when': 'midnight',
            'formatter': 'exception',
            'maxBytes': 1024 * 1024 * 100,
            'backupCount': 7,
            'filename': DRF_EXCEPTION_LOG_FILE,
        },
        'unexpected_exception': {
            'encoding': 'utf8',
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'class': 'jumpserver.rewriting.logging.DailyTimedRotatingFileHandler',
            'when': 'midnight',
            'formatter': 'exception',
            'maxBytes': 1024 * 1024 * 100,
            'backupCount': 7,
            'filename': UNEXPECTED_EXCEPTION_LOG_FILE,
        },
        'syslog': {

@@ -155,3 +151,4 @@ if CONFIG.SYSLOG_ADDR != '' and len(CONFIG.SYSLOG_ADDR.split(':')) == 2:

if not os.path.isdir(LOG_DIR):
    os.makedirs(LOG_DIR, mode=0o755)
@@ -70,8 +70,8 @@ class RDPConnectionManager:
        connection_details = {
            'hostname': self.params['login_host'],
            'port': self.params['login_port'],
            'username': self.params['username'],
            'password': self.params['password']
            'username': self.params['login_user'],
            'password': self.params['login_password']
        }
        return connection_details
@@ -1,7 +1,6 @@
import textwrap
import traceback
from itertools import chain
from typing import Iterable

from celery import shared_task
from django.utils.translation import gettext_lazy as _

@@ -43,7 +42,13 @@ class MessageType(type):
        return clz


@shared_task(verbose_name=_('Publish the station message'))
@shared_task(
    verbose_name=_('Publish the station message'),
    description=_(
        """This task needs to be executed for sending internal messages for system alerts,
        work orders, and other notifications"""
    )
)
def publish_task(receive_user_ids, backends_msg_mapper):
    Message.send_msg(receive_user_ids, backends_msg_mapper)
@@ -45,24 +45,34 @@ class JMSInventory:
        return groups

    @staticmethod
    def make_proxy_command(gateway, path_dir):
    def get_gateway_ssh_settings(gateway):
        platform = gateway.platform
        try:
            protocol = platform.protocols.get(name='ssh')
        except platform.protocols.model.DoesNotExist:
            return {}
        return protocol.setting

    def make_proxy_command(self, gateway, path_dir):
        proxy_command_list = [
            "ssh", "-o", "Port={}".format(gateway.port),
            "-o", "StrictHostKeyChecking=no",
            "{}@{}".format(gateway.username, gateway.address),
            "-W", "%h:%p", "-q",
            f"{gateway.username}@{gateway.address}"
        ]

        if gateway.password:
            proxy_command_list.insert(
                0, "sshpass -p {}".format(gateway.password)
            )
        if gateway.private_key:
            proxy_command_list.append("-i {}".format(gateway.get_private_key_path(path_dir)))
        setting = self.get_gateway_ssh_settings(gateway)
        if setting.get('nc', False):
            proxy_command_list.extend(["nc", "-w", "10", "%h", "%p"])
        else:
            proxy_command_list.extend(["-W", "%h:%p", "-q"])

        proxy_command = "-o ProxyCommand='{}'".format(
            " ".join(proxy_command_list)
        )
        if gateway.password:
            proxy_command_list.insert(0, f"sshpass -p {gateway.password}")

        if gateway.private_key:
            proxy_command_list.append(f"-i {gateway.get_private_key_path(path_dir)}")

        proxy_command = f"-o ProxyCommand='{' '.join(proxy_command_list)}'"
        return {"ansible_ssh_common_args": proxy_command}

    @staticmethod
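The reworked make_proxy_command now reads the gateway's SSH platform settings: if the protocol setting enables nc, the jump goes through netcat, otherwise the previous -W forwarding is kept. An illustration of the two resulting strings, using made-up gateway values (address, port and username are not from this commit):

# Hypothetical values for illustration only.
port, user, addr = 2222, 'jms', '192.0.2.10'
base = ["ssh", "-o", f"Port={port}", "-o", "StrictHostKeyChecking=no", f"{user}@{addr}"]
# With the platform SSH setting `nc` enabled:
print("-o ProxyCommand='{}'".format(" ".join(base + ["nc", "-w", "10", "%h", "%p"])))
# -o ProxyCommand='ssh -o Port=2222 -o StrictHostKeyChecking=no jms@192.0.2.10 nc -w 10 %h %p'
# Default (-W form):
print("-o ProxyCommand='{}'".format(" ".join(base + ["-W", "%h:%p", "-q"])))
# -o ProxyCommand='ssh -o Port=2222 -o StrictHostKeyChecking=no jms@192.0.2.10 -W %h:%p -q'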
@@ -1,22 +1,37 @@
# -*- coding: utf-8 -*-
from orgs.mixins.api import OrgBulkModelViewSet
from django.db.models import Q

from common.api.generic import JMSBulkModelViewSet
from common.utils.http import is_true
from rbac.permissions import RBACPermission
from ..const import Scope
from ..models import AdHoc
from ..serializers import (
    AdHocSerializer
)
from ..serializers import AdHocSerializer

__all__ = [
    'AdHocViewSet'
]


class AdHocViewSet(OrgBulkModelViewSet):
class AdHocViewSet(JMSBulkModelViewSet):
    queryset = AdHoc.objects.all()
    serializer_class = AdHocSerializer
    permission_classes = (RBACPermission,)
    search_fields = ('name', 'comment')
    model = AdHoc
    filterset_fields = ['scope', 'creator']

    def check_object_permissions(self, request, obj):
        if request.method != 'GET' and obj.creator != request.user:
            self.permission_denied(
                request, message={"detail": "Deleting other people's script is not allowed"}
            )
        return super().check_object_permissions(request, obj)

    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(creator=self.request.user)
        user = self.request.user
        if is_true(self.request.query_params.get('only_mine')):
            queryset = queryset.filter(creator=user)
        else:
            queryset = queryset.filter(Q(creator=user) | Q(scope=Scope.public))
        return queryset
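After this change a user's ad hoc script list contains their own scripts plus anything shared with scope=public, and the creator-only view moves behind the only_mine query parameter. A hypothetical request sketch (the URL prefix is assumed, it is not part of this diff):

GET /api/v1/ops/adhocs/                  -> creator == me OR scope == public
GET /api/v1/ops/adhocs/?only_mine=true   -> creator == me only
GET /api/v1/ops/adhocs/?scope=public     -> server-side filter via the new `scope` filterset field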
@@ -4,13 +4,16 @@ import zipfile

from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext_lazy as _
from rest_framework import status

from common.api.generic import JMSBulkModelViewSet
from common.exceptions import JMSException
from orgs.mixins.api import OrgBulkModelViewSet
from common.utils.http import is_true
from rbac.permissions import RBACPermission
from ..const import Scope
from ..exception import PlaybookNoValidEntry
from ..models import Playbook
from ..serializers.playbook import PlaybookSerializer

@@ -28,11 +31,19 @@ def unzip_playbook(src, dist):
            fz.extract(file, dist)


class PlaybookViewSet(OrgBulkModelViewSet):
class PlaybookViewSet(JMSBulkModelViewSet):
    serializer_class = PlaybookSerializer
    permission_classes = (RBACPermission,)
    model = Playbook
    queryset = Playbook.objects.all()
    search_fields = ('name', 'comment')
    filterset_fields = ['scope', 'creator']

    def check_object_permissions(self, request, obj):
        if request.method != 'GET' and obj.creator != request.user:
            self.permission_denied(
                request, message={"detail": "Deleting other people's playbook is not allowed"}
            )
        return super().check_object_permissions(request, obj)

    def perform_destroy(self, instance):
        if instance.job_set.exists():

@@ -45,7 +56,11 @@ class PlaybookViewSet(OrgBulkModelViewSet):

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(creator=self.request.user)
        user = self.request.user
        if is_true(self.request.query_params.get('only_mine')):
            queryset = queryset.filter(creator=user)
        else:
            queryset = queryset.filter(Q(creator=user) | Q(scope=Scope.public))
        return queryset

    def perform_create(self, serializer):

@@ -85,7 +100,8 @@ class PlaybookFileBrowserAPIView(APIView):

    def get(self, request, **kwargs):
        playbook_id = kwargs.get('pk')
        playbook = self.get_playbook(playbook_id)
        user = self.request.user
        playbook = get_object_or_404(Playbook, Q(creator=user) | Q(scope=Scope.public), id=playbook_id)
        work_path = playbook.work_dir
        file_key = request.query_params.get('key', '')
        if file_key:
@@ -1,5 +1,5 @@
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.utils.translation import gettext_lazy as _, pgettext_lazy


class StrategyChoice(models.TextChoices):

@@ -80,3 +80,8 @@ class JobStatus(models.TextChoices):
CELERY_LOG_MAGIC_MARK = b'\x00\x00\x00\x00\x00'

COMMAND_EXECUTION_DISABLED = _('Command execution disabled')


class Scope(models.TextChoices):
    public = 'public', pgettext_lazy("scope", 'Public')
    private = 'private', _('Private')
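The switch to pgettext_lazy for the public label is about translator context: the same English word can need different translations depending on where it appears, and the extra "scope" context string keeps this occurrence separate. A tiny standalone illustration with the stdlib gettext module (Python 3.8+, no Django and no message catalog loaded):

from gettext import NullTranslations

t = NullTranslations()
# pgettext takes a context string plus the message; with no catalog loaded it
# simply falls back to the untranslated message.
print(t.pgettext("scope", "Public"))   # -> Public
print(t.gettext("Public"))             # -> Public, but shares one translation everywhere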
@@ -0,0 +1,61 @@
# Generated by Django 4.1.13 on 2024-09-06 08:32

from django.conf import settings
from django.db import migrations, models
from orgs.models import Organization


def migrate_ops_adhoc_and_playbook_name(apps, schema_editor):
    Adhoc = apps.get_model('ops', 'adhoc')
    Playbook = apps.get_model('ops', 'playbook')
    Organization = apps.get_model('orgs', 'Organization')
    org_id_name_mapper = {str(org.id): org.name for org in Organization.objects.all()}

    adhocs_to_update = Adhoc.objects.exclude(org_id=Organization.DEFAULT_ID)
    for adhoc in adhocs_to_update:
        suffix = org_id_name_mapper.get(str(adhoc.org_id), str(adhoc.id)[:6])
        adhoc.name = f'{adhoc.name} ({suffix})'
    Adhoc.objects.bulk_update(adhocs_to_update, ['name'])

    playbooks_to_update = Playbook.objects.exclude(org_id=Organization.DEFAULT_ID)
    for playbook in playbooks_to_update:
        suffix = org_id_name_mapper.get(str(playbook.org_id), str(playbook.id)[:6])
        playbook.name = f'{playbook.name} ({suffix})'
    Playbook.objects.bulk_update(playbooks_to_update, ['name'])


class Migration(migrations.Migration):
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('ops', '0002_celerytask'),
    ]

    operations = [
        migrations.RunPython(migrate_ops_adhoc_and_playbook_name),
        migrations.AlterUniqueTogether(
            name='adhoc',
            unique_together={('name', 'creator')},
        ),
        migrations.AlterUniqueTogether(
            name='playbook',
            unique_together={('name', 'creator')},
        ),
        migrations.AddField(
            model_name='adhoc',
            name='scope',
            field=models.CharField(default='public', max_length=64, verbose_name='Scope'),
        ),
        migrations.AddField(
            model_name='playbook',
            name='scope',
            field=models.CharField(default='public', max_length=64, verbose_name='Scope'),
        ),
        migrations.RemoveField(
            model_name='adhoc',
            name='org_id',
        ),
        migrations.RemoveField(
            model_name='playbook',
            name='org_id',
        ),
    ]
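Because org_id is dropped and the unique constraint collapses to (name, creator), objects that used to be distinguished only by organization could now collide. The data migration above avoids that by appending a suffix to names outside the default org; the standalone sketch below (made-up ids and names) shows the renaming rule in isolation.

org_id_name_mapper = {"11111111": "DevOps", "22222222": "Prod"}

def renamed(name, org_id, obj_id):
    # prefer the organization's name as the suffix, fall back to a short object id
    suffix = org_id_name_mapper.get(org_id, obj_id[:6])
    return f"{name} ({suffix})"

print(renamed("reboot-hosts", "11111111", "a1b2c3d4e5f6"))  # reboot-hosts (DevOps)
print(renamed("reboot-hosts", "99999999", "a1b2c3d4e5f6"))  # reboot-hosts (a1b2c3)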
@@ -16,6 +16,13 @@ __all__ = [
]


class PeriodTaskModelQuerySet(models.QuerySet):
    def delete(self, *args, **kwargs):
        for obj in self:
            obj.delete()
        return super().delete(*args, **kwargs)


class PeriodTaskModelMixin(models.Model):
    name = models.CharField(
        max_length=128, unique=False, verbose_name=_("Name")

@@ -27,6 +34,7 @@ class PeriodTaskModelMixin(models.Model):
    crontab = models.CharField(
        blank=True, max_length=128, null=True, verbose_name=_("Crontab"),
    )
    objects = PeriodTaskModelQuerySet.as_manager()

    @abc.abstractmethod
    def get_register_task(self):
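A bulk queryset delete normally bypasses each instance's delete() method, so model-level cleanup (for a periodic task model, typically unregistering the schedule) would be skipped. The toy classes below sketch the idea behind the custom queryset; they are illustrative stand-ins, not Django code.

class FakePeriodTask:
    def __init__(self, name):
        self.name = name

    def delete(self):
        # in the real model this is where the registered periodic task is removed
        print(f"cleanup hook ran for {self.name}")


class FakeQuerySet(list):
    def delete(self):
        for obj in self:      # run every instance's delete() first, like the mixin's loop
            obj.delete()
        count = len(self)
        self.clear()          # then do the bulk removal
        return count


print(FakeQuerySet([FakePeriodTask("backup"), FakePeriodTask("sync")]).delete())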
@@ -8,14 +8,13 @@ from common.utils import get_logger

__all__ = ["AdHoc"]

from ops.const import AdHocModules

from orgs.mixins.models import JMSOrgBaseModel
from common.db.models import JMSBaseModel
from ops.const import AdHocModules, Scope

logger = get_logger(__file__)


class AdHoc(JMSOrgBaseModel):
class AdHoc(JMSBaseModel):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(max_length=128, verbose_name=_('Name'))
    pattern = models.CharField(max_length=1024, verbose_name=_("Pattern"), default='all')

@@ -24,6 +23,7 @@ class AdHoc(JMSOrgBaseModel):
    args = models.CharField(max_length=8192, default='', verbose_name=_('Args'))
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)
    scope = models.CharField(max_length=64, default=Scope.public, verbose_name=_('Scope'))

    @property
    def row_count(self):

@@ -40,5 +40,5 @@ class AdHoc(JMSOrgBaseModel):
        return "{}: {}".format(self.module, self.args)

    class Meta:
        unique_together = [('name', 'org_id', 'creator')]
        unique_together = [('name', 'creator')]
        verbose_name = _("Adhoc")
@@ -23,6 +23,7 @@ class CeleryTask(models.Model):
        task = app.tasks.get(self.name, None)
        return {
            "comment": getattr(task, 'verbose_name', None),
            "description": getattr(task, 'description', None),
            "queue": getattr(task, 'queue', 'default')
        }
@@ -6,9 +6,9 @@ from django.db import models
from django.utils.translation import gettext_lazy as _
from private_storage.fields import PrivateFileField

from ops.const import CreateMethods
from common.db.models import JMSBaseModel
from ops.const import CreateMethods, Scope
from ops.exception import PlaybookNoValidEntry
from orgs.mixins.models import JMSOrgBaseModel

dangerous_keywords = (
    'hosts:localhost',

@@ -23,7 +23,9 @@ dangerous_keywords = (
)


class Playbook(JMSOrgBaseModel):


class Playbook(JMSBaseModel):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(max_length=128, verbose_name=_('Name'), null=True)
    path = PrivateFileField(upload_to='playbooks/')

@@ -31,6 +33,7 @@ class Playbook(JMSOrgBaseModel):
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)
    create_method = models.CharField(max_length=128, choices=CreateMethods.choices, default=CreateMethods.blank,
                                     verbose_name=_('CreateMethod'))
    scope = models.CharField(max_length=64, default=Scope.public, verbose_name=_('Scope'))
    vcs_url = models.CharField(max_length=1024, default='', verbose_name=_('VCS URL'), null=True, blank=True)

    def __str__(self):

@@ -84,6 +87,6 @@ class Playbook(JMSOrgBaseModel):
        return work_dir

    class Meta:
        unique_together = [('name', 'org_id', 'creator')]
        unique_together = [('name', 'creator')]
        verbose_name = _("Playbook")
        ordering = ['date_created']
@@ -1,17 +1,19 @@
# ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals

from django.utils.translation import gettext_lazy as _
from rest_framework import serializers

from common.serializers.fields import ReadableHiddenField
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from common.serializers.fields import ReadableHiddenField, LabeledChoiceField
from common.serializers.mixin import CommonBulkModelSerializer
from .mixin import ScopeSerializerMixin
from ..const import Scope
from ..models import AdHoc


class AdHocSerializer(BulkOrgResourceModelSerializer):
class AdHocSerializer(ScopeSerializerMixin, CommonBulkModelSerializer):
    creator = ReadableHiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = AdHoc
        read_only_field = ["id", "creator", "date_created", "date_updated"]
        fields = read_only_field + ["id", "name", "module", "args", "comment"]
        fields = read_only_field + ["id", "name", "scope", "module", "args", "comment"]

@@ -0,0 +1,11 @@
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers

from common.serializers.fields import LabeledChoiceField
from ..const import Scope


class ScopeSerializerMixin(serializers.Serializer):
    scope = LabeledChoiceField(
        choices=Scope.choices, default=Scope.public, label=_("Scope")
    )
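ScopeSerializerMixin is plugged into the AdHoc serializer above (and the playbook serializer below) purely through multiple inheritance, so each gains the same scope field. As a rough idea of what a labeled choice renders, assuming (not verified here) that LabeledChoiceField represents a choice as a value/label pair, a plain-Python approximation:

SCOPE_CHOICES = {"public": "Public", "private": "Private"}

def scope_to_representation(value):
    # value as stored on the model, label as shown to the user
    return {"value": value, "label": SCOPE_CHOICES.get(value, value)}

print(scope_to_representation("public"))   # {'value': 'public', 'label': 'Public'}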
@@ -3,8 +3,9 @@ import os
from rest_framework import serializers

from common.serializers.fields import ReadableHiddenField
from common.serializers.mixin import CommonBulkModelSerializer
from ops.models import Playbook
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .mixin import ScopeSerializerMixin


def parse_playbook_name(path):

@@ -12,7 +13,7 @@ def parse_playbook_name(path):
    return file_name.split(".")[-2]


class PlaybookSerializer(BulkOrgResourceModelSerializer):
class PlaybookSerializer(ScopeSerializerMixin, CommonBulkModelSerializer):
    creator = ReadableHiddenField(default=serializers.CurrentUserDefault())
    path = serializers.FileField(required=False)

@@ -26,6 +27,6 @@ class PlaybookSerializer(BulkOrgResourceModelSerializer):
        model = Playbook
        read_only_fields = ["id", "date_created", "date_updated"]
        fields = read_only_fields + [
            "id", 'path', "name", "comment", "creator",
            "id", 'path', 'scope', "name", "comment", "creator",
            'create_method', 'vcs_url',
        ]
@@ -12,7 +12,7 @@ from common.utils import get_logger, get_object_or_none, get_log_keep_day
from ops.celery import app
from orgs.utils import tmp_to_org, tmp_to_root_org
from .celery.decorator import (
    register_as_period_task, after_app_ready_start, after_app_shutdown_clean_periodic
    register_as_period_task, after_app_ready_start
)
from .celery.utils import (
    create_or_update_celery_periodic_tasks, get_celery_periodic_task,

@@ -46,8 +46,13 @@ def _run_ops_job_execution(execution):


@shared_task(
    soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task"),
    activity_callback=job_task_activity_callback
    soft_time_limit=60,
    queue="ansible",
    verbose_name=_("Run ansible task"),
    activity_callback=job_task_activity_callback,
    description=_(
        "Execute scheduled adhoc and playbooks, periodically invoking the task for execution"
    )
)
def run_ops_job(job_id):
    with tmp_to_root_org():

@@ -72,8 +77,13 @@ def job_execution_task_activity_callback(self, execution_id, *args, **kwargs):


@shared_task(
    soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task execution"),
    activity_callback=job_execution_task_activity_callback
    soft_time_limit=60,
    queue="ansible",
    verbose_name=_("Run ansible task execution"),
    activity_callback=job_execution_task_activity_callback,
    description=_(
        "Execute the task when manually adhoc or playbooks"
    )
)
def run_ops_job_execution(execution_id, **kwargs):
    with tmp_to_root_org():

@@ -85,7 +95,12 @@ def run_ops_job_execution(execution_id, **kwargs):
        _run_ops_job_execution(execution)


@shared_task(verbose_name=_('Clear celery periodic tasks'))
@shared_task(
    verbose_name=_('Clear celery periodic tasks'),
    description=_(
        "At system startup, clean up celery tasks that no longer exist"
    )
)
@after_app_ready_start
def clean_celery_periodic_tasks():
    """Clean up celery periodic tasks"""

@@ -106,7 +121,14 @@ def clean_celery_periodic_tasks():
            logger.info('Clean task failure: {}'.format(task))


@shared_task(verbose_name=_('Create or update periodic tasks'))
@shared_task(
    verbose_name=_('Create or update periodic tasks'),
    description=_(
        """With version iterations, new tasks may be added, or task names and execution times may
        be modified. Therefore, upon system startup, tasks will be registered or the parameters
        of scheduled tasks will be updated"""
    )
)
@after_app_ready_start
def create_or_update_registered_periodic_tasks():
    from .celery.decorator import get_register_period_tasks

@@ -114,20 +136,42 @@ def create_or_update_registered_periodic_tasks():
        create_or_update_celery_periodic_tasks(task)


@shared_task(verbose_name=_("Periodic check service performance"))
@shared_task(
    verbose_name=_("Periodic check service performance"),
    description=_(
        """Check every hour whether each component is offline and whether the CPU, memory,
        and disk usage exceed the thresholds, and send an alert message to the administrator"""
    )
)
@register_as_period_task(interval=3600)
def check_server_performance_period():
    ServerPerformanceCheckUtil().check_and_publish()


@shared_task(verbose_name=_("Clean up unexpected jobs"))
@shared_task(
    verbose_name=_("Clean up unexpected jobs"),
    description=_(
        """Due to exceptions caused by executing adhoc and playbooks in the Job Center,
        which result in the task status not being updated, the system will clean up abnormal jobs
        that have not been completed for more than 3 hours every hour and mark these tasks as
        failed"""
    )
)
@register_as_period_task(interval=3600)
def clean_up_unexpected_jobs():
    with tmp_to_root_org():
        JobExecution.clean_unexpected_execution()


@shared_task(verbose_name=_('Clean job_execution db record'))
@shared_task(
    verbose_name=_('Clean job_execution db record'),
    description=_(
        """Due to the execution of adhoc and playbooks in the Job Center, execution records will
        be generated. The system will clean up records that exceed the retention period every day
        at 2 a.m., based on the configuration of 'System Settings - Tasks - Regular clean-up -
        Job execution retention days'"""
    )
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
def clean_job_execution_period():
    logger.info("Start clean job_execution db record")

@@ -136,7 +180,8 @@ def clean_job_execution_period():
    expired_day = now - datetime.timedelta(days=days)
    with tmp_to_root_org():
        del_res = JobExecution.objects.filter(date_created__lt=expired_day).delete()
        logger.info(f"clean job_execution db record success! delete {days} days {del_res[0]} records")
        logger.info(
            f"clean job_execution db record success! delete {days} days {del_res[0]} records")

# For testing use only, hidden by commenting it out
# @shared_task
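All of the task hunks above follow one pattern: each Celery task now declares a description next to its verbose_name, and the CeleryTask hunk earlier in this diff reads both back with getattr. Below is a minimal sketch of that round trip, assuming only that Celery is installed and that extra decorator keyword arguments end up as attributes on the task object (as Celery's task decorator generally does); the task itself is hypothetical.

from celery import shared_task

@shared_task(
    verbose_name="Demo task",
    description="One sentence on when and why this task runs",
)
def demo_task():
    return "ok"

# mirrors the getattr() reads shown above: unknown decorator kwargs become task attributes
print(getattr(demo_task, "verbose_name", None))
print(getattr(demo_task, "description", None))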
@@ -23,7 +23,7 @@ class OrgResourceSerializerMixin(serializers.Serializer):
    but coco needs the asset's org_id field, so it is changed to a CharField type
    """
    org_id = serializers.ReadOnlyField(default=get_current_org_id_for_serializer, label=_("Organization"))
    org_name = serializers.ReadOnlyField(label=_("Org name"))
    org_name = serializers.CharField(label=_("Org name"), read_only=True)
    add_org_fields = True

    def get_validators(self):
@@ -6,7 +6,10 @@ from common.utils import get_logger
logger = get_logger(__file__)


@shared_task(verbose_name=_("Refresh organization cache"))
@shared_task(
    verbose_name=_("Refresh organization cache"),
    description=_("Unused")
)
def refresh_org_cache_task(*fields):
    from .caches import OrgResourceStatisticsCache
    OrgResourceStatisticsCache.refresh(*fields)
@@ -27,6 +27,17 @@ class ActionChoicesField(BitChoicesField):
        return data


class PermAccountsSerializer(serializers.ListField):
    def get_render_help_text(self):
        return _('Accounts, format ["@virtual", "root", "%template_id"], '
                 'virtual choices: @ALL, @SPEC, @USER, @ANON, @INPUT')


class PermProtocolsSerializer(serializers.ListField):
    def get_render_help_text(self):
        return _('Protocols, format ["ssh", "rdp", "vnc"] or ["all"]')


class AssetPermissionSerializer(ResourceLabelsMixin, BulkOrgResourceModelSerializer):
    users = ObjectRelatedField(queryset=User.objects, many=True, required=False, label=_('Users'))
    user_groups = ObjectRelatedField(

@@ -41,8 +52,8 @@ class AssetPermissionSerializer(ResourceLabelsMixin, BulkOrgResourceModelSeriali
    actions = ActionChoicesField(required=False, allow_null=True, label=_("Action"))
    is_valid = serializers.BooleanField(read_only=True, label=_("Is valid"))
    is_expired = serializers.BooleanField(read_only=True, label=_("Is expired"))
    accounts = serializers.ListField(label=_("Accounts"), required=False)
    protocols = serializers.ListField(label=_("Protocols"), required=False)
    accounts = PermAccountsSerializer(label=_("Accounts"), required=False)
    protocols = PermProtocolsSerializer(label=_("Protocols"), required=False)

    template_accounts = AccountTemplate.objects.none()
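The two ListField subclasses above exist only to attach richer help text; where the get_render_help_text hook is consumed is JumpServer-internal and not shown in this hunk. A dependency-free stand-in for the pattern, with made-up class names:

class ListField:  # toy stand-in for the DRF field
    def get_render_help_text(self):
        return "A list of values"

class ProtocolsField(ListField):
    def get_render_help_text(self):
        return 'Protocols, format ["ssh", "rdp", "vnc"] or ["all"]'

# a renderer that knows about the hook can show the richer text
print(ProtocolsField().get_render_help_text())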
@@ -24,7 +24,15 @@ from perms.utils import UserPermTreeExpireUtil
logger = get_logger(__file__)


@shared_task(verbose_name=_('Check asset permission expired'))
@shared_task(
    verbose_name=_('Check asset permission expired'),
    description=_(
        """The cache of organizational collections, which have completed user authorization tree
        construction, will expire. Therefore, expired collections need to be cleared from the
        cache, and this task will be executed periodically based on the time interval specified
        by PERM_EXPIRED_CHECK_PERIODIC in the system configuration file config.txt"""
    )
)
@register_as_period_task(interval=settings.PERM_EXPIRED_CHECK_PERIODIC)
@atomic()
@tmp_to_root_org()

@@ -37,7 +45,15 @@ def check_asset_permission_expired():
    UserPermTreeExpireUtil().expire_perm_tree_for_perms(perm_ids)


@shared_task(verbose_name=_('Send asset permission expired notification'))
@shared_task(
    verbose_name=_('Send asset permission expired notification'),
    description=_(
        """Check every day at 10 a.m. and send a notification message to users associated with
        assets whose authorization is about to expire, as well as to the organization's
        administrators, 3 days in advance, to remind them that the asset authorization will
        expire in a few days"""
    )
)
@register_as_period_task(crontab=CRONTAB_AT_AM_TEN)
@atomic()
@tmp_to_root_org()
@@ -26,12 +26,14 @@ class LDAPUserListApi(generics.ListAPIView):

    def get_queryset_from_cache(self):
        search_value = self.request.query_params.get('search')
        users = LDAPCacheUtil().search(search_value=search_value)
        category = self.request.query_params.get('category')
        users = LDAPCacheUtil(category=category).search(search_value=search_value)
        return users

    def get_queryset_from_server(self):
        search_value = self.request.query_params.get('search')
        users = LDAPServerUtil().search(search_value=search_value)
        category = self.request.query_params.get('category')
        users = LDAPServerUtil(category=category).search(search_value=search_value)
        return users

    def get_queryset(self):
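With the LDAP HA settings split out, the user-list API reads an extra category query parameter and forwards it to the cache and server utilities. The hedged client-side sketch below is only an illustration: the endpoint path, the "ldap_ha" category value, and the token header are assumptions inferred from this diff, not verified API documentation.

import requests

resp = requests.get(
    "https://jumpserver.example.com/api/v1/settings/ldap/users/",   # assumed path
    params={"search": "alice", "category": "ldap_ha"},               # assumed category value
    headers={"Authorization": "Token <api-token>"},                  # placeholder credential
    timeout=10,
)
print(resp.status_code)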
@@ -36,6 +36,7 @@ class SettingsApi(generics.RetrieveUpdateAPIView):
        'security_password': serializers.SecurityPasswordRuleSerializer,
        'security_login_limit': serializers.SecurityLoginLimitSerializer,
        'ldap': serializers.LDAPSettingSerializer,
        'ldap_ha': serializers.LDAPHASettingSerializer,
        'email': serializers.EmailSettingSerializer,
        'email_content': serializers.EmailContentSettingSerializer,
        'wecom': serializers.WeComSettingSerializer,

@@ -4,6 +4,7 @@ from .dingtalk import *
from .feishu import *
from .lark import *
from .ldap import *
from .ldap_ha import *
from .oauth2 import *
from .oidc import *
from .passkey import *

@@ -11,6 +11,7 @@ class AuthSettingSerializer(serializers.Serializer):
    PREFIX_TITLE = _('Authentication')

    AUTH_LDAP = serializers.BooleanField(required=False, label=_('LDAP Auth'))
    AUTH_LDAP_HA = serializers.BooleanField(required=False, label=_('LDAP Auth HA'))
    AUTH_CAS = serializers.BooleanField(required=False, label=_('CAS Auth'))
    AUTH_OPENID = serializers.BooleanField(required=False, label=_('OPENID Auth'))
    AUTH_SAML2 = serializers.BooleanField(default=False, label=_("SAML2 Auth"))