mirror of https://github.com/jumpserver/jumpserver
commit eccbf46300 (merge: with dev)
@@ -77,7 +77,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
     && pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-38.0.4-cp39-cp39-linux_loongarch64.whl \
     && pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp39-cp39-linux_loongarch64.whl \
     && pip install https://download.jumpserver.org/pypi/simple/PyNaCl/PyNaCl-1.5.0-cp39-cp39-linux_loongarch64.whl \
-    && pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.0-cp39-cp39-linux_loongarch64.whl \
+    && pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.2-cp39-cp39-linux_loongarch64.whl \
     && pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
     && pip install -r requirements/requirements.txt
README.md (22 changed lines)
@@ -23,7 +23,14 @@
 
 --------------------------
 
-JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。
+JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型的资产,包括:
+
+- **SSH**: Linux / Unix / 网络设备 等;
+- **Windows**: Web 方式连接 / 原生 RDP 连接;
+- **数据库**: MySQL / Oracle / SQLServer / PostgreSQL 等;
+- **Kubernetes**: 支持连接到 K8s 集群中的 Pods;
+- **Web 站点**: 各类系统的 Web 管理后台;
+- **应用**: 通过 Remote App 连接各类应用。
 
 ## 产品特色
@@ -33,8 +40,6 @@ JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运
 - **多云支持**: 一套系统,同时管理不同云上面的资产;
 - **多租户**: 一套系统,多个子公司或部门同时使用;
 - **云端存储**: 审计录像云端存储,永不丢失;
-- **多应用支持**: 全面支持各类资产,包括服务器、数据库、Windows RemoteApp、Kubernetes 等;
-- **安全可靠**: 被广泛使用、验证和信赖,连续 9 年的持续研发投入和产品更新升级。
 
 ## UI 展示
@@ -72,12 +77,13 @@ JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运
 - [东方明珠:JumpServer高效管控异构化、分布式云端资产](https://blog.fit2cloud.com/?p=687)
 - [江苏农信:JumpServer堡垒机助力行业云安全运维](https://blog.fit2cloud.com/?p=666)
 
-## 社区
+## 社区交流
 
-如果您在使用过程中有任何疑问或对建议,欢迎提交 [GitHub Issue](https://github.com/jumpserver/jumpserver/issues/new/choose)
-或加入到我们的社区当中进行进一步交流沟通。
+如果您在使用过程中有任何疑问或对建议,欢迎提交 [GitHub Issue](https://github.com/jumpserver/jumpserver/issues/new/choose)。
 
-### 微信交流群
+您也可以到我们的 [社区论坛](https://bbs.fit2cloud.com/c/js/5) 及微信交流群当中进行交流沟通。
+
+**微信交流群**
 
 <img src="https://download.jumpserver.org/images/wecom-group.jpeg" alt="微信群二维码" width="200"/>
@@ -107,7 +113,7 @@ JumpServer是一款安全产品,请参考 [基本安全建议](https://docs.ju
 - 邮箱:support@fit2cloud.com
 - 电话:400-052-0755
 
-## 致谢
+## 致谢开源
 
 - [Apache Guacamole](https://guacamole.apache.org/): Web 页面连接 RDP、SSH、VNC 等协议资产,JumpServer Lion 组件使用到该项目;
 - [OmniDB](https://omnidb.org/): Web 页面连接使用数据库,JumpServer Web 数据库组件使用到该项目。
@@ -32,6 +32,7 @@ class AccountViewSet(OrgBulkModelViewSet):
         'su_from_accounts': 'accounts.view_account',
         'clear_secret': 'accounts.change_account',
     }
+    export_as_zip = True
 
     @action(methods=['get'], detail=False, url_path='su-from-accounts')
     def su_from_accounts(self, request, *args, **kwargs):
@@ -1,4 +1,6 @@
 from django_filters import rest_framework as drf_filters
+from rest_framework.decorators import action
+from rest_framework.response import Response
 
 from accounts import serializers
 from accounts.models import AccountTemplate
@@ -38,8 +40,20 @@ class AccountTemplateViewSet(OrgBulkModelViewSet):
     filterset_class = AccountTemplateFilterSet
     search_fields = ('username', 'name')
     serializer_classes = {
-        'default': serializers.AccountTemplateSerializer
+        'default': serializers.AccountTemplateSerializer,
     }
+    rbac_perms = {
+        'su_from_account_templates': 'accounts.view_accounttemplate',
+    }
+
+    @action(methods=['get'], detail=False, url_path='su-from-account-templates')
+    def su_from_account_templates(self, request, *args, **kwargs):
+        pk = request.query_params.get('template_id')
+        template = AccountTemplate.objects.filter(pk=pk).first()
+        templates = AccountTemplate.get_su_from_account_templates(template)
+        templates = self.filter_queryset(templates)
+        serializer = self.get_serializer(templates, many=True)
+        return Response(data=serializer.data)
 
 
 class AccountTemplateSecretsViewSet(RecordViewLogMixin, AccountTemplateViewSet):
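Note: a hedged sketch of how the new `su-from-account-templates` action above might be called from a client. Only the `su-from-account-templates` path segment and the `template_id` query parameter come from the diff; the base URL, the `/api/v1/accounts/account-templates/` prefix, and the token header are assumptions for illustration.

    # Hypothetical client call; host, API prefix and credentials are placeholders.
    import requests

    resp = requests.get(
        "https://jumpserver.example.com/api/v1/accounts/account-templates/su-from-account-templates/",
        params={"template_id": "<template-uuid>"},        # placeholder: a real template id
        headers={"Authorization": "Token <api-token>"},   # placeholder: a real API token
    )
    print(resp.status_code, resp.json())  # expected: serialized templates eligible as su_from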
@@ -9,6 +9,7 @@
     name: "{{ account.username }}"
     password: "{{ account.secret | password_hash('des') }}"
     update_password: always
+  ignore_errors: true
   when: account.secret_type == "password"
 
 - name: create user If it already exists, no operation will be performed
@@ -9,6 +9,7 @@
     name: "{{ account.username }}"
     password: "{{ account.secret | password_hash('sha512') }}"
     update_password: always
+  ignore_errors: true
   when: account.secret_type == "password"
 
 - name: create user If it already exists, no operation will be performed
@@ -21,6 +21,7 @@
     groups: "{{ user_info.groups[0].name }}"
     groups_action: add
     update_password: always
+  ignore_errors: true
   when: account.secret_type == "password"
 
 - name: Refresh connection
@@ -72,14 +72,14 @@ class ChangeSecretManager(AccountBasePlaybookManager):
             return []
 
         asset = privilege_account.asset
-        accounts = asset.accounts.exclude(username=privilege_account.username)
+        accounts = asset.accounts.all()
         accounts = accounts.filter(id__in=self.account_ids)
         if self.secret_type:
             accounts = accounts.filter(secret_type=self.secret_type)
 
         if settings.CHANGE_AUTH_PLAN_SECURE_MODE_ENABLED:
             accounts = accounts.filter(privileged=False).exclude(
-                username__in=['root', 'administrator']
+                username__in=['root', 'administrator', privilege_account.username]
             )
         return accounts
@@ -13,8 +13,8 @@ class GatherAccountsFilter:
     def mysql_filter(info):
         result = {}
         for _, user_dict in info.items():
-            for username, data in user_dict.items():
-                if data.get('account_locked') == 'N':
+            for username, _ in user_dict.items():
+                if len(username.split('.')) == 1:
                     result[username] = {}
         return result
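To make the new filtering rule above concrete, here is a small standalone Python check. The sample input shape (an outer dict keyed by host, an inner dict keyed by username) is an assumption based on how the filter iterates; only usernames without a dot survive.

    # Self-contained re-statement of the new mysql_filter logic with made-up input.
    def mysql_filter(info):
        result = {}
        for _, user_dict in info.items():
            for username, _ in user_dict.items():
                if len(username.split('.')) == 1:   # keep only names without a '.'
                    result[username] = {}
        return result

    sample = {'host1': {'root': {}, 'app.%': {}, 'backup': {}}}
    print(mysql_filter(sample))   # -> {'root': {}, 'backup': {}}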
@@ -43,6 +43,7 @@
     name: "{{ account.username }}"
     password: "{{ account.secret | password_hash('sha512') }}"
     update_password: always
+  ignore_errors: true
   when: account.secret_type == "password"
 
 - name: remove jumpserver ssh key
@@ -43,6 +43,7 @@
     name: "{{ account.username }}"
     password: "{{ account.secret | password_hash('sha512') }}"
     update_password: always
+  ignore_errors: true
   when: account.secret_type == "password"
 
 - name: remove jumpserver ssh key
@@ -17,6 +17,7 @@
     groups: "{{ params.groups }}"
     groups_action: add
     update_password: always
+  ignore_errors: true
   when: account.secret_type == "password"
 
 - name: Refresh connection
@@ -10,5 +10,5 @@
     login_port: "{{ jms_asset.port }}"
     login_user: "{{ account.username }}"
     login_password: "{{ account.secret }}"
-    login_secret_type: "{{ jms_account.secret_type }}"
-    login_private_key_path: "{{ jms_account.private_key_path }}"
+    login_secret_type: "{{ account.secret_type }}"
+    login_private_key_path: "{{ account.private_key_path }}"
@@ -5,7 +5,6 @@ from django_filters import rest_framework as drf_filters
 
 from assets.models import Node
 from common.drf.filters import BaseFilterSet
 
 from .models import Account, GatheredAccount
 
-
@@ -46,7 +45,7 @@ class AccountFilterSet(BaseFilterSet):
 
     class Meta:
         model = Account
-        fields = ['id', 'asset_id']
+        fields = ['id', 'asset_id', 'source_id']
 
 
 class GatheredAccountFilterSet(BaseFilterSet):
@@ -0,0 +1,29 @@
+# Generated by Django 3.2.17 on 2023-05-06 06:43
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('accounts', '0010_gatheraccountsautomation_is_sync_account'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='accounttemplate',
+            name='su_from',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='su_to', to='accounts.accounttemplate', verbose_name='Su from'),
+        ),
+        migrations.AlterField(
+            model_name='changesecretautomation',
+            name='ssh_key_change_strategy',
+            field=models.CharField(choices=[('add', 'Append SSH KEY'), ('set', 'Empty and append SSH KEY'), ('set_jms', 'Replace (Replace only keys pushed by JumpServer) ')], default='add', max_length=16, verbose_name='SSH key change strategy'),
+        ),
+        migrations.AlterField(
+            model_name='pushaccountautomation',
+            name='ssh_key_change_strategy',
+            field=models.CharField(choices=[('add', 'Append SSH KEY'), ('set', 'Empty and append SSH KEY'), ('set_jms', 'Replace (Replace only keys pushed by JumpServer) ')], default='add', max_length=16, verbose_name='SSH key change strategy'),
+        ),
+    ]
@@ -1,4 +1,6 @@
 from django.db import models
+from django.db.models import Count, Q
+from django.utils import timezone
 from django.utils.translation import gettext_lazy as _
 from simple_history.models import HistoricalRecords
@@ -106,6 +108,11 @@ class Account(AbsConnectivity, BaseAccount):
 
 
 class AccountTemplate(BaseAccount):
+    su_from = models.ForeignKey(
+        'self', related_name='su_to', null=True,
+        on_delete=models.SET_NULL, verbose_name=_("Su from")
+    )
+
     class Meta:
         verbose_name = _('Account template')
         unique_together = (
@@ -116,5 +123,62 @@ class AccountTemplate(BaseAccount):
             ('change_accounttemplatesecret', _('Can change asset account template secret')),
         ]
 
+    @classmethod
+    def get_su_from_account_templates(cls, instance=None):
+        if not instance:
+            return cls.objects.all()
+        return cls.objects.exclude(Q(id=instance.id) | Q(su_from=instance))
+
+    def get_su_from_account(self, asset):
+        su_from = self.su_from
+        if su_from and asset.platform.su_enabled:
+            account = asset.accounts.filter(
+                username=su_from.username,
+                secret_type=su_from.secret_type
+            ).first()
+            return account
+
     def __str__(self):
         return self.username
+
+    @staticmethod
+    def bulk_update_accounts(accounts, data):
+        history_model = Account.history.model
+        account_ids = accounts.values_list('id', flat=True)
+        history_accounts = history_model.objects.filter(id__in=account_ids)
+        account_id_count_map = {
+            str(i['id']): i['count']
+            for i in history_accounts.values('id').order_by('id')
+            .annotate(count=Count(1)).values('id', 'count')
+        }
+
+        for account in accounts:
+            account_id = str(account.id)
+            account.version = account_id_count_map.get(account_id) + 1
+            for k, v in data.items():
+                setattr(account, k, v)
+        Account.objects.bulk_update(accounts, ['version', 'secret'])
+
+    @staticmethod
+    def bulk_create_history_accounts(accounts, user_id):
+        history_model = Account.history.model
+        history_account_objs = []
+        for account in accounts:
+            history_account_objs.append(
+                history_model(
+                    id=account.id,
+                    version=account.version,
+                    secret=account.secret,
+                    secret_type=account.secret_type,
+                    history_user_id=user_id,
+                    history_date=timezone.now()
+                )
+            )
+        history_model.objects.bulk_create(history_account_objs)
+
+    def bulk_sync_account_secret(self, accounts, user_id):
+        """ 批量同步账号密码 """
+        if not accounts:
+            return
+        self.bulk_update_accounts(accounts, {'secret': self.secret})
+        self.bulk_create_history_accounts(accounts, user_id)
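A hedged usage sketch of the new template helpers above, assuming a configured JumpServer Django environment (for example `python manage.py shell`). The objects fetched with `.first()` are placeholders, and the sync step assumes each affected account already has at least one history row (see `bulk_update_accounts` above).

    # Illustrative only: chain the new helpers from a Django shell.
    from accounts.models import Account, AccountTemplate
    from users.models import User

    template = AccountTemplate.objects.first()   # placeholder template
    operator = User.objects.first()              # placeholder user recorded on the history rows

    # Templates that may serve as su_from (excludes itself and templates su-ing from it).
    candidates = AccountTemplate.get_su_from_account_templates(template)
    print(candidates.count())

    accounts = Account.objects.filter(source_id=template.id)
    # Bumps each account's version, copies the template secret, then bulk-creates history records.
    template.bulk_sync_account_secret(accounts, operator.id)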
@@ -1,4 +1,5 @@
 import uuid
+from copy import deepcopy
 
 from django.db import IntegrityError
 from django.db.models import Q
@@ -21,8 +22,8 @@ logger = get_logger(__name__)
 
 class AccountCreateUpdateSerializerMixin(serializers.Serializer):
     template = serializers.PrimaryKeyRelatedField(
-        queryset=AccountTemplate.objects,
-        required=False, label=_("Template"), write_only=True
+        queryset=AccountTemplate.objects, required=False,
+        label=_("Template"), write_only=True, allow_null=True
     )
     push_now = serializers.BooleanField(
         default=False, label=_("Push now"), write_only=True
@@ -32,9 +33,10 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
     )
     on_invalid = LabeledChoiceField(
         choices=AccountInvalidPolicy.choices, default=AccountInvalidPolicy.ERROR,
-        write_only=True, label=_('Exist policy')
+        write_only=True, allow_null=True, label=_('Exist policy'),
     )
     _template = None
+    clean_auth_fields: callable
 
     class Meta:
         fields = ['template', 'push_now', 'params', 'on_invalid']
@@ -91,7 +93,7 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
 
         self._template = template
         # Set initial data from template
-        ignore_fields = ['id', 'date_created', 'date_updated', 'org_id']
+        ignore_fields = ['id', 'date_created', 'date_updated', 'su_from', 'org_id']
         field_names = [
             field.name for field in template._meta.fields
             if field.name not in ignore_fields
@@ -151,12 +153,14 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
         template = self._template
         if template is None:
             return
 
         validated_data['source'] = Source.TEMPLATE
         validated_data['source_id'] = str(template.id)
 
     def create(self, validated_data):
         push_now = validated_data.pop('push_now', None)
         params = validated_data.pop('params', None)
+        self.clean_auth_fields(validated_data)
         self.generate_source_data(validated_data)
         instance, stat = self.do_create(validated_data)
         self.push_account_if_need(instance, push_now, params, stat)
@@ -238,14 +242,18 @@ class AssetAccountBulkSerializerResultSerializer(serializers.Serializer):
 class AssetAccountBulkSerializer(
     AccountCreateUpdateSerializerMixin, AuthValidateMixin, serializers.ModelSerializer
 ):
+    su_from_username = serializers.CharField(
+        max_length=128, required=False, write_only=True, allow_null=True, label=_("Su from"),
+        allow_blank=True,
+    )
     assets = serializers.PrimaryKeyRelatedField(queryset=Asset.objects, many=True, label=_('Assets'))
 
     class Meta:
         model = Account
         fields = [
-            'name', 'username', 'secret', 'secret_type',
+            'name', 'username', 'secret', 'secret_type', 'passphrase',
             'privileged', 'is_active', 'comment', 'template',
-            'on_invalid', 'push_now', 'assets',
+            'on_invalid', 'push_now', 'assets', 'su_from_username'
         ]
         extra_kwargs = {
             'name': {'required': False},
@@ -293,8 +301,21 @@ class AssetAccountBulkSerializer(
             raise serializers.ValidationError(_('Account already exists'))
         return instance, True, 'created'
 
+    def generate_su_from_data(self, validated_data):
+        template = self._template
+        asset = validated_data['asset']
+        su_from = validated_data.get('su_from')
+        su_from_username = validated_data.pop('su_from_username', None)
+        if template:
+            su_from = template.get_su_from_account(asset)
+        elif su_from_username:
+            su_from = asset.accounts.filter(username=su_from_username).first()
+        validated_data['su_from'] = su_from
+
     def perform_create(self, vd, handler):
         lookup = self.get_filter_lookup(vd)
+        vd = deepcopy(vd)
+        self.generate_su_from_data(vd)
         try:
             instance, changed, state = handler(vd, lookup)
         except IntegrityError:
|
||||||
vd = vd.copy()
|
vd = vd.copy()
|
||||||
vd['asset'] = asset
|
vd['asset'] = asset
|
||||||
try:
|
try:
|
||||||
|
self.clean_auth_fields(vd)
|
||||||
instance, changed, state = self.perform_create(vd, create_handler)
|
instance, changed, state = self.perform_create(vd, create_handler)
|
||||||
_results[asset] = {
|
_results[asset] = {
|
||||||
'changed': changed, 'instance': instance.id, 'state': state
|
'changed': changed, 'instance': instance.id, 'state': state
|
||||||
|
|
|
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-from django.utils.translation import ugettext as _
+from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers
 
 from accounts.models import AccountBackupAutomation, AccountBackupExecution
@@ -78,4 +78,5 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
         ]
         extra_kwargs = {
             'spec_info': {'label': _('Spec info')},
+            'username': {'help_text': _("Tip: If no username is required for authentication, fill in `null`")}
         }
@@ -1,86 +1,44 @@
-from django.db.transaction import atomic
-from django.db.utils import IntegrityError
+from django.utils.translation import ugettext_lazy as _
+from rest_framework import serializers
 
 from accounts.models import AccountTemplate, Account
-from assets.models import Asset
 from common.serializers import SecretReadableMixin
+from common.serializers.fields import ObjectRelatedField
 from .base import BaseAccountSerializer
 
 
 class AccountTemplateSerializer(BaseAccountSerializer):
+    is_sync_account = serializers.BooleanField(default=False, write_only=True)
+    _is_sync_account = False
+
+    su_from = ObjectRelatedField(
+        required=False, queryset=AccountTemplate.objects, allow_null=True,
+        allow_empty=True, label=_('Su from'), attrs=('id', 'name', 'username')
+    )
 
     class Meta(BaseAccountSerializer.Meta):
         model = AccountTemplate
+        fields = BaseAccountSerializer.Meta.fields + ['is_sync_account', 'su_from']
 
-    @staticmethod
-    def account_save(data, account):
-        for field, value in data.items():
-            setattr(account, field, value)
-        try:
-            account.save(update_fields=list(data.keys()))
-        except IntegrityError:
-            pass
-
-    # TODO 数据库访问的太多了 后期优化
-    @atomic()
-    def bulk_update_accounts(self, instance, diff):
-        accounts = Account.objects.filter(source_id=instance.id)
-        if not accounts:
+    def sync_accounts_secret(self, instance, diff):
+        if not self._is_sync_account or 'secret' not in diff:
             return
 
-        diff.pop('secret', None)
-        name = diff.pop('name', None)
-        username = diff.pop('username', None)
-        secret_type = diff.pop('secret_type', None)
-        update_accounts = []
-        for account in accounts:
-            for field, value in diff.items():
-                setattr(account, field, value)
-            update_accounts.append(account)
-
-        if update_accounts:
-            Account.objects.bulk_update(update_accounts, diff.keys())
-
-        if name:
-            for account in accounts:
-                data = {'name': name}
-                self.account_save(data, account)
-
-        if secret_type and username:
-            asset_ids_supports = self.get_asset_ids_supports(accounts, secret_type)
-            for account in accounts:
-                asset_id = account.asset_id
-                if asset_id not in asset_ids_supports:
-                    data = {'username': username}
-                    self.account_save(data, account)
-                    continue
-                data = {'username': username, 'secret_type': secret_type, 'secret': instance.secret}
-                self.account_save(data, account)
-        elif secret_type:
-            asset_ids_supports = self.get_asset_ids_supports(accounts, secret_type)
-            for account in accounts:
-                asset_id = account.asset_id
-                if asset_id not in asset_ids_supports:
-                    continue
-                data = {'secret_type': secret_type, 'secret': instance.secret}
-                self.account_save(data, account)
-        elif username:
-            for account in accounts:
-                data = {'username': username}
-                self.account_save(data, account)
-
-    @staticmethod
-    def get_asset_ids_supports(accounts, secret_type):
-        asset_ids = accounts.values_list('asset_id', flat=True)
-        secret_type_supports = Asset.get_secret_type_assets(asset_ids, secret_type)
-        return [asset.id for asset in secret_type_supports]
+        accounts = Account.objects.filter(source_id=instance.id)
+        instance.bulk_sync_account_secret(accounts, self.context['request'].user.id)
+
+    def validate(self, attrs):
+        self._is_sync_account = attrs.pop('is_sync_account', None)
+        attrs = super().validate(attrs)
+        return attrs
 
     def update(self, instance, validated_data):
-        # diff = {
-        #     k: v for k, v in validated_data.items()
-        #     if getattr(instance, k) != v
-        # }
+        diff = {
+            k: v for k, v in validated_data.items()
+            if getattr(instance, k, None) != v
+        }
         instance = super().update(instance, validated_data)
-        # self.bulk_update_accounts(instance, diff)
+        self.sync_accounts_secret(instance, diff)
         return instance
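A minimal sketch of the new update path above: the secret propagates to accounts created from the template only when the client opts in with `is_sync_account` and the `secret` value actually changed. Here `template` and `request` are placeholders for an existing instance and the current DRF request in the calling view.

    # Illustrative only; template and request are assumed to exist in the calling view.
    data = {'secret': 'new-password', 'is_sync_account': True}
    serializer = AccountTemplateSerializer(
        instance=template, data=data, partial=True, context={'request': request}
    )
    serializer.is_valid(raise_exception=True)
    serializer.save()   # update() computes the diff, then sync_accounts_secret() calls
                        # instance.bulk_sync_account_secret() for accounts with source_id == template.id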
@@ -1,4 +1,4 @@
-from django.utils.translation import ugettext as _
+from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers
 
 from accounts.models import AutomationExecution
@@ -2,6 +2,7 @@ from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers
 
 from acls.models.base import ActionChoices
+from jumpserver.utils import has_valid_xpack_license
 from common.serializers.fields import JSONManyToManyField, ObjectRelatedField, LabeledChoiceField
 from orgs.models import Organization
 from users.models import User
@@ -51,7 +52,26 @@ class ACLAccountsSerializer(serializers.Serializer):
     )
 
 
-class BaseUserAssetAccountACLSerializerMixin(serializers.Serializer):
+class ActionAclSerializer(serializers.Serializer):
+    action = LabeledChoiceField(
+        choices=ActionChoices.choices, default=ActionChoices.reject, label=_("Action")
+    )
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.set_action_choices()
+
+    def set_action_choices(self):
+        action = self.fields.get("action")
+        if not action:
+            return
+        choices = action.choices
+        if not has_valid_xpack_license():
+            choices.pop(ActionChoices.review, None)
+        action._choices = choices
+
+
+class BaseUserAssetAccountACLSerializerMixin(ActionAclSerializer, serializers.Serializer):
     users = JSONManyToManyField(label=_('User'))
     assets = JSONManyToManyField(label=_('Asset'))
     accounts = serializers.ListField(label=_('Account'))
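The refactor above pulls the `action` field and the xpack-aware choice pruning into a reusable `ActionAclSerializer`. The sketch below shows how another ACL serializer would pick the behavior up just by inheriting; the subclass is hypothetical and not part of this diff.

    # Hypothetical subclass for illustration only.
    class DemoACLSerializer(ActionAclSerializer, serializers.Serializer):
        # Inherits `action` plus __init__ -> set_action_choices(), which drops the
        # "review" choice when no valid xpack license is present.
        comment = serializers.CharField(required=False, allow_blank=True)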
@@ -61,9 +81,6 @@ class BaseUserAssetAccountACLSerializerMixin(serializers.Serializer):
     reviewers_amount = serializers.IntegerField(
         read_only=True, source="reviewers.count", label=_('Reviewers amount')
     )
-    action = LabeledChoiceField(
-        choices=ActionChoices.choices, default=ActionChoices.reject, label=_("Action")
-    )
 
     class Meta:
         fields_mini = ["id", "name"]
@@ -2,12 +2,11 @@ from django.utils.translation import ugettext as _
 from rest_framework import serializers
 
 from common.serializers import BulkModelSerializer, MethodSerializer
-from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
-from jumpserver.utils import has_valid_xpack_license
+from common.serializers.fields import ObjectRelatedField
 from users.models import User
+from .base import ActionAclSerializer
 from .rules import RuleSerializer
 from ..models import LoginACL
-from ..models.base import ActionChoices
 
 __all__ = [
     "LoginACLSerializer",
@@ -18,12 +17,11 @@ common_help_text = _(
 )
 
 
-class LoginACLSerializer(BulkModelSerializer):
+class LoginACLSerializer(ActionAclSerializer, BulkModelSerializer):
     user = ObjectRelatedField(queryset=User.objects, label=_("User"))
     reviewers = ObjectRelatedField(
         queryset=User.objects, label=_("Reviewers"), many=True, required=False
     )
-    action = LabeledChoiceField(choices=ActionChoices.choices, label=_('Action'))
     reviewers_amount = serializers.IntegerField(
         read_only=True, source="reviewers.count", label=_("Reviewers amount")
     )
@@ -45,18 +43,5 @@ class LoginACLSerializer(BulkModelSerializer):
             "is_active": {"default": True},
         }
 
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.set_action_choices()
-
-    def set_action_choices(self):
-        action = self.fields.get("action")
-        if not action:
-            return
-        choices = action.choices
-        if not has_valid_xpack_license():
-            choices.pop(LoginACL.ActionChoices.review, None)
-        action._choices = choices
-
     def get_rules_serializer(self):
         return RuleSerializer()
@@ -25,7 +25,7 @@ from ...notifications import BulkUpdatePlatformSkipAssetUserMsg
 logger = get_logger(__file__)
 __all__ = [
     "AssetViewSet", "AssetTaskCreateApi",
-    "AssetsTaskCreateApi", 'AssetFilterSet'
+    "AssetsTaskCreateApi", 'AssetFilterSet',
 ]
@@ -95,7 +95,7 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
     """
     model = Asset
     filterset_class = AssetFilterSet
-    search_fields = ("name", "address")
+    search_fields = ("name", "address", "comment")
     ordering_fields = ('name', 'connectivity', 'platform', 'date_updated')
     serializer_classes = (
         ("default", serializers.AssetSerializer),
@@ -69,7 +69,7 @@ class SerializeToTreeNodeMixin:
         return 'file'
 
     @timeit
-    def serialize_assets(self, assets, node_key=None):
+    def serialize_assets(self, assets, node_key=None, pid=None):
         sftp_enabled_platform = PlatformProtocol.objects \
             .filter(name='ssh', setting__sftp_enabled=True) \
             .values_list('platform', flat=True) \
@@ -83,8 +83,10 @@ class SerializeToTreeNodeMixin:
             {
                 'id': str(asset.id),
                 'name': asset.name,
-                'title': f'{asset.address}\n{asset.comment}',
-                'pId': get_pid(asset),
+                'title':
+                    f'{asset.address}\n{asset.comment}'
+                    if asset.comment else asset.address,
+                'pId': pid or get_pid(asset),
                 'isParent': False,
                 'open': False,
                 'iconSkin': self.get_icon(asset),
@@ -163,8 +163,10 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
         # 资源数量统计可选项 (asset, account)
         count_resource = self.request.query_params.get('count_resource', 'asset')
 
-        if include_asset and self.request.query_params.get('key'):
+        if not self.request.query_params.get('key'):
+            nodes = AllTypes.to_tree_nodes(include_asset, count_resource=count_resource)
+        elif include_asset:
             nodes = self.get_assets()
         else:
-            nodes = AllTypes.to_tree_nodes(include_asset, count_resource=count_resource)
+            nodes = []
         return Response(data=nodes)
@@ -166,6 +166,7 @@ class BasePlaybookManager:
             account_prefer=self.ansible_account_prefer,
             account_policy=self.ansible_account_policy,
             host_callback=self.host_callback,
+            task_type=self.__class__.method_type(),
         )
         inventory.write_to_file(inventory_path)
@@ -223,7 +224,8 @@ class BasePlaybookManager:
         pass
 
     def on_host_error(self, host, error, result):
-        print('host error: {} -> {}'.format(host, error))
+        if settings.DEBUG_DEV:
+            print('host error: {} -> {}'.format(host, error))
 
     def on_runner_success(self, runner, cb):
         summary = cb.summary
@@ -193,15 +193,38 @@ class AllTypes(ChoicesMixin):
         }
         return node
 
     @classmethod
-    def to_tree_nodes(cls, include_asset, count_resource='asset'):
-        from accounts.models import Account
-        from ..models import Asset, Platform
-        if count_resource == 'account':
-            resource_platforms = Account.objects.all().values_list('asset__platform_id', flat=True)
-        else:
-            resource_platforms = Asset.objects.all().values_list('platform_id', flat=True)
-
+    def asset_to_node(cls, asset, pid):
+        node = {
+            'id': '{}'.format(asset.id),
+            'name': asset.name,
+            'title': f'{asset.address}\n{asset.comment}',
+            'pId': pid,
+            'isParent': False,
+            'open': False,
+            'iconSkin': asset.type,
+            'chkDisabled': not asset.is_active,
+            'meta': {
+                'type': 'platform',
+                'data': {
+                    'platform_type': asset.platform.type,
+                    'org_name': asset.org_name,
+                    # 'sftp': asset.platform_id in sftp_enabled_platform,
+                    'name': asset.name,
+                    'address': asset.address
+                },
+            }
+        }
+        return node
+
+    @classmethod
+    def get_root_nodes(cls):
+        return dict(id='ROOT', name=_('All types'), title=_('All types'), open=True, isParent=True)
+
+    @classmethod
+    def get_tree_nodes(cls, resource_platforms, include_asset=False):
+        from ..models import Platform
         platform_count = defaultdict(int)
         for platform_id in resource_platforms:
             platform_count[platform_id] += 1
@@ -215,8 +238,7 @@ class AllTypes(ChoicesMixin):
             category_type_mapper[p.category] += platform_count[p.id]
             tp_platforms[p.category + '_' + p.type].append(p)
 
-        root = dict(id='ROOT', name=_('All types'), title=_('All types'), open=True, isParent=True)
-        nodes = [root]
+        nodes = [cls.get_root_nodes()]
         for category, type_cls in cls.category_types():
             # Category 格式化
             meta = {'type': 'category', 'category': category.value}
@@ -244,6 +266,16 @@ class AllTypes(ChoicesMixin):
             nodes.append(platform_node)
         return nodes
 
+    @classmethod
+    def to_tree_nodes(cls, include_asset, count_resource='asset'):
+        from accounts.models import Account
+        from ..models import Asset
+        if count_resource == 'account':
+            resource_platforms = Account.objects.all().values_list('asset__platform_id', flat=True)
+        else:
+            resource_platforms = Asset.objects.all().values_list('platform_id', flat=True)
+        return cls.get_tree_nodes(resource_platforms, include_asset)
+
     @classmethod
     def get_type_default_platform(cls, category, tp):
         constraints = cls.get_constraints(category, tp)
@@ -20,12 +20,13 @@ def get_prop_name_id(apps, app, category):
 
 
 def migrate_database_to_asset(apps, *args):
+    node_model = apps.get_model('assets', 'Node')
     app_model = apps.get_model('applications', 'Application')
     db_model = apps.get_model('assets', 'Database')
     platform_model = apps.get_model('assets', 'Platform')
 
     applications = app_model.objects.filter(category='db')
-    platforms = platform_model.objects.all().filter(internal=True)
+    platforms = platform_model.objects.all().filter(internal=True).exclude(name='Redis6+')
     platforms_map = {p.type: p for p in platforms}
     print()
@@ -84,11 +85,18 @@ def create_app_nodes(apps, org_id):
     node_keys = node_model.objects.filter(org_id=org_id) \
         .filter(key__regex=child_pattern) \
         .values_list('key', flat=True)
-    if not node_keys:
-        return
-    node_key_split = [key.split(':') for key in node_keys]
-    next_value = max([int(k[1]) for k in node_key_split]) + 1
-    parent_key = node_key_split[0][0]
+    if node_keys:
+        node_key_split = [key.split(':') for key in node_keys]
+        next_value = max([int(k[1]) for k in node_key_split]) + 1
+        parent_key = node_key_split[0][0]
+    else:
+        root_node = node_model.objects.filter(org_id=org_id)\
+            .filter(parent_key='', key__regex=r'^[0-9]+$').exclude(key__startswith='-').first()
+        if not root_node:
+            return
+        parent_key = root_node.key
+        next_value = 0
 
     next_key = '{}:{}'.format(parent_key, next_value)
     name = 'Apps'
     parent = node_model.objects.get(key=parent_key)
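A worked example, with made-up node keys, of the key arithmetic in the branch added above.

    # Existing children of org root "1": the next Apps node becomes "1:6".
    node_keys = ['1:1', '1:5']
    node_key_split = [key.split(':') for key in node_keys]
    next_value = max([int(k[1]) for k in node_key_split]) + 1   # 5 + 1 = 6
    parent_key = node_key_split[0][0]                           # '1'
    print('{}:{}'.format(parent_key, next_value))               # -> 1:6

    # With no children, the new else-branch falls back to the org's root node key
    # and starts numbering at 0, e.g. root key '1' -> '1:0'.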
@@ -161,11 +161,12 @@ def migrate_db_accounts(apps, schema_editor):
             name = f'{username}(token)'
         else:
             secret_type = attr
-            name = username
+            name = username or f'{username}(password)'
         auth_infos.append((name, secret_type, secret))
 
     if not auth_infos:
-        auth_infos.append((username, 'password', ''))
+        name = username or f'{username}(password)'
+        auth_infos.append((name, 'password', ''))
 
     for name, secret_type, secret in auth_infos:
         values['name'] = name
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.17 on 2023-05-06 06:43
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('assets', '0116_auto_20230418_1726'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='baseautomation',
+            name='params',
+            field=models.JSONField(default=dict, verbose_name='Parameters'),
+        ),
+    ]
@@ -26,14 +26,6 @@ class PlatformProtocol(models.Model):
     def secret_types(self):
         return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])
 
-    def set_public(self):
-        private_protocol_set = ('winrm',)
-        self.public = self.name not in private_protocol_set
-
-    def save(self, **kwargs):
-        self.set_public()
-        return super().save(**kwargs)
-
 
 class PlatformAutomation(models.Model):
     ansible_enabled = models.BooleanField(default=False, verbose_name=_("Enabled"))
@@ -23,7 +23,7 @@ __all__ = [
     'AssetSerializer', 'AssetSimpleSerializer', 'MiniAssetSerializer',
     'AssetTaskSerializer', 'AssetsTaskSerializer', 'AssetProtocolsSerializer',
    'AssetDetailSerializer', 'DetailMixin', 'AssetAccountSerializer',
-    'AccountSecretSerializer', 'AssetProtocolsPermsSerializer'
+    'AccountSecretSerializer', 'AssetProtocolsPermsSerializer', 'AssetLabelSerializer'
 ]
@@ -1,5 +1,6 @@
 from django.utils.translation import ugettext_lazy as _
 from rest_framework.serializers import ValidationError
+from rest_framework import serializers
 
 from assets.models import Database
 from assets.serializers.gateway import GatewayWithAccountSecretSerializer
@@ -9,6 +10,8 @@ __all__ = ['DatabaseSerializer', 'DatabaseWithGatewaySerializer']
 
 
 class DatabaseSerializer(AssetSerializer):
+    db_name = serializers.CharField(max_length=1024, label=_('Default database'), required=True)
+
     class Meta(AssetSerializer.Meta):
         model = Database
         extra_fields = [
@@ -1,12 +1,12 @@
 from django.utils.translation import ugettext as _
 from rest_framework import serializers
 
-from ops.mixin import PeriodTaskSerializerMixin
 from assets.models import Asset, Node, BaseAutomation, AutomationExecution
-from orgs.mixins.serializers import BulkOrgResourceModelSerializer
-from common.utils import get_logger
 from common.const.choices import Trigger
 from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
+from common.utils import get_logger
+from ops.mixin import PeriodTaskSerializerMixin
+from orgs.mixins.serializers import BulkOrgResourceModelSerializer
 
 logger = get_logger(__file__)
@@ -48,9 +48,12 @@ class AutomationExecutionSerializer(serializers.ModelSerializer):
 
     @staticmethod
     def get_snapshot(obj):
+        from assets.const import AutomationTypes as AssetTypes
+        from accounts.const import AutomationTypes as AccountTypes
+        tp_dict = dict(AssetTypes.choices) | dict(AccountTypes.choices)
         tp = obj.snapshot['type']
         snapshot = {
-            'type': tp,
+            'type': {'value': tp, 'label': tp_dict.get(tp, tp)},
             'name': obj.snapshot['name'],
             'comment': obj.snapshot['comment'],
             'accounts': obj.snapshot['accounts'],
@@ -82,7 +82,7 @@ class PlatformProtocolSerializer(serializers.ModelSerializer):
         model = PlatformProtocol
         fields = [
             "id", "name", "port", "primary",
-            "required", "default",
+            "required", "default", "public",
             "secret_types", "setting",
         ]
@@ -122,6 +122,7 @@ class PlatformSerializer(WritableNestedModelSerializer):
         fields_small = fields_mini + [
             "category", "type", "charset",
         ]
+        fields_unexport = ['automation']
         read_only_fields = [
             'internal', 'date_created', 'date_updated',
             'created_by', 'updated_by'
@@ -156,20 +157,6 @@ class PlatformSerializer(WritableNestedModelSerializer):
         constraints = AllTypes.get_constraints(category, tp)
         return constraints
 
-    def validate(self, attrs):
-        domain_enabled = attrs.get('domain_enabled', False) and self.constraints.get('domain_enabled', False)
-        su_enabled = attrs.get('su_enabled', False) and self.constraints.get('su_enabled', False)
-        automation = attrs.get('automation', {})
-        automation['ansible_enabled'] = automation.get('ansible_enabled', False) \
-            and self.constraints['automation'].get('ansible_enabled', False)
-        attrs.update({
-            'domain_enabled': domain_enabled,
-            'su_enabled': su_enabled,
-            'automation': automation,
-        })
-        self.initial_data['automation'] = automation
-        return attrs
-
     @classmethod
     def setup_eager_loading(cls, queryset):
         queryset = queryset.prefetch_related(
@@ -187,6 +174,18 @@ class PlatformSerializer(WritableNestedModelSerializer):
         self.initial_data['protocols'] = protocols
         return protocols
 
+    def validate_su_enabled(self, su_enabled):
+        return su_enabled and self.constraints.get('su_enabled', False)
+
+    def validate_domain_enabled(self, domain_enabled):
+        return domain_enabled and self.constraints.get('domain_enabled', False)
+
+    def validate_automation(self, automation):
+        automation = automation or {}
+        automation = automation.get('ansible_enabled', False) \
+                     and self.constraints['automation'].get('ansible_enabled', False)
+        return automation
+
 
 class PlatformOpsMethodSerializer(serializers.Serializer):
     id = serializers.CharField(read_only=True)
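For readers less familiar with the DRF hook used above: `validate_<field_name>(value)` runs per field during `is_valid()`, before the object-level `validate()`. A minimal standalone illustration follows (run it in any environment where DRF is set up, such as a Django shell); the `constraints` dict is a stand-in for the platform type constraints and is not JumpServer code.

    # Standalone illustration of DRF field-level validation; not part of the diff.
    from rest_framework import serializers

    class DemoPlatformSerializer(serializers.Serializer):
        su_enabled = serializers.BooleanField(default=False)

        constraints = {'su_enabled': False}   # stand-in for the real platform constraints

        def validate_su_enabled(self, su_enabled):
            return su_enabled and self.constraints.get('su_enabled', False)

    s = DemoPlatformSerializer(data={'su_enabled': True})
    s.is_valid()
    print(s.validated_data)   # {'su_enabled': False}: the constraint wins over the input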
@@ -45,10 +45,8 @@ class OperatorLogHandler(metaclass=Singleton):
             pre_value, value = self._consistent_type_to_str(pre_value, value)
             if sorted(str(value)) == sorted(str(pre_value)):
                 continue
-            if pre_value:
-                before[key] = pre_value
-            if value:
-                after[key] = value
+            before[key] = pre_value
+            after[key] = value
         return before, after
 
     def cache_instance_before_data(self, instance_dict):
@@ -70,8 +70,10 @@ def _get_instance_field_value(
 
         if getattr(f, 'primary_key', False):
             f.verbose_name = 'id'
-        elif isinstance(value, (list, dict)):
+        elif isinstance(value, list):
             value = copy.deepcopy(value)
+        elif isinstance(value, dict):
+            value = dict(copy.deepcopy(value))
         elif isinstance(value, datetime):
             value = as_current_tz(value).strftime('%Y-%m-%d %H:%M:%S')
         elif isinstance(f, models.OneToOneField) and isinstance(value, models.Model):
@@ -3,6 +3,7 @@ import json
 import os
 import urllib.parse
 
+from django.conf import settings
 from django.http import HttpResponse
 from django.shortcuts import get_object_or_404
 from django.utils import timezone
@@ -12,14 +13,12 @@ from rest_framework.decorators import action
 from rest_framework.exceptions import PermissionDenied
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.serializers import ValidationError
 
-from assets.const import CloudTypes
 from common.api import JMSModelViewSet
 from common.exceptions import JMSException
 from common.utils import random_string, get_logger
 from common.utils.django import get_request_os
-from common.utils.http import is_true
+from common.utils.http import is_true, is_false
 from orgs.mixins.api import RootOrgViewMixin
 from perms.models import ActionChoices
 from terminal.connect_methods import NativeClient, ConnectMethodUtil
@ -27,7 +26,8 @@ from terminal.models import EndpointRule
|
||||||
from ..models import ConnectionToken, date_expired_default
|
from ..models import ConnectionToken, date_expired_default
|
||||||
from ..serializers import (
|
from ..serializers import (
|
||||||
ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
|
ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
|
||||||
SuperConnectionTokenSerializer, ConnectTokenAppletOptionSerializer
|
SuperConnectionTokenSerializer, ConnectTokenAppletOptionSerializer,
|
||||||
|
ConnectionTokenUpdateSerializer
|
||||||
)
|
)
|
||||||
|
|
||||||
__all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']
|
__all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']
|
||||||
|
@ -88,7 +88,8 @@ class RDPFileClientProtocolURLMixin:
|
||||||
if width and height:
|
if width and height:
|
||||||
rdp_options['desktopwidth:i'] = width
|
rdp_options['desktopwidth:i'] = width
|
||||||
rdp_options['desktopheight:i'] = height
|
rdp_options['desktopheight:i'] = height
|
||||||
rdp_options['winposstr:s:'] = f'0,1,0,0,{width},{height}'
|
rdp_options['winposstr:s'] = f'0,1,0,0,{width},{height}'
|
||||||
|
rdp_options['dynamic resolution:i'] = '0'
|
||||||
|
|
||||||
# 设置其他选项
|
# 设置其他选项
|
||||||
rdp_options['session bpp:i'] = os.getenv('JUMPSERVER_COLOR_DEPTH', '32')
|
rdp_options['session bpp:i'] = os.getenv('JUMPSERVER_COLOR_DEPTH', '32')
|
||||||
|
@ -99,6 +100,10 @@ class RDPFileClientProtocolURLMixin:
|
||||||
remote_app_options = token.get_remote_app_option()
|
remote_app_options = token.get_remote_app_option()
|
||||||
rdp_options.update(remote_app_options)
|
rdp_options.update(remote_app_options)
|
||||||
|
|
||||||
|
rdp = token.asset.platform.protocols.filter(name='rdp').first()
|
||||||
|
if rdp and rdp.setting.get('console'):
|
||||||
|
rdp_options['administrative session:i'] = '1'
|
||||||
|
|
||||||
# 文件名
|
# 文件名
|
||||||
name = token.asset.name
|
name = token.asset.name
|
||||||
prefix_name = f'{token.user.username}-{name}'
|
prefix_name = f'{token.user.username}-{name}'
|
||||||
|
@ -226,10 +231,14 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
|
||||||
search_fields = filterset_fields
|
search_fields = filterset_fields
|
||||||
serializer_classes = {
|
serializer_classes = {
|
||||||
'default': ConnectionTokenSerializer,
|
'default': ConnectionTokenSerializer,
|
||||||
|
'update': ConnectionTokenUpdateSerializer,
|
||||||
|
'partial_update': ConnectionTokenUpdateSerializer,
|
||||||
}
|
}
|
||||||
|
http_method_names = ['get', 'post', 'patch', 'head', 'options', 'trace']
|
||||||
rbac_perms = {
|
rbac_perms = {
|
||||||
'list': 'authentication.view_connectiontoken',
|
'list': 'authentication.view_connectiontoken',
|
||||||
'retrieve': 'authentication.view_connectiontoken',
|
'retrieve': 'authentication.view_connectiontoken',
|
||||||
|
'update': 'authentication.change_connectiontoken',
|
||||||
'create': 'authentication.add_connectiontoken',
|
'create': 'authentication.add_connectiontoken',
|
||||||
'exchange': 'authentication.add_connectiontoken',
|
'exchange': 'authentication.add_connectiontoken',
|
||||||
'expire': 'authentication.change_connectiontoken',
|
'expire': 'authentication.change_connectiontoken',
|
||||||
|
@ -366,19 +375,26 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
||||||
|
|
||||||
token_id = request.data.get('id') or ''
|
token_id = request.data.get('id') or ''
|
||||||
token = get_object_or_404(ConnectionToken, pk=token_id)
|
token = get_object_or_404(ConnectionToken, pk=token_id)
|
||||||
if token.is_expired:
|
|
||||||
raise ValidationError({'id': 'Token is expired'})
|
|
||||||
|
|
||||||
token.is_valid()
|
token.is_valid()
|
||||||
serializer = self.get_serializer(instance=token)
|
serializer = self.get_serializer(instance=token)
|
||||||
expire_now = request.data.get('expire_now', True)
|
|
||||||
|
|
||||||
# TODO 暂时特殊处理 k8s 不过期
|
expire_now = request.data.get('expire_now', None)
|
||||||
if token.asset.type == CloudTypes.K8S:
|
asset_type = token.asset.type
|
||||||
expire_now = False
|
# 设置默认值
|
||||||
|
if expire_now is None:
|
||||||
|
# TODO 暂时特殊处理 k8s 不过期
|
||||||
|
if asset_type in ['k8s', 'kubernetes']:
|
||||||
|
expire_now = False
|
||||||
|
else:
|
||||||
|
expire_now = not settings.CONNECTION_TOKEN_REUSABLE
|
||||||
|
|
||||||
if expire_now:
|
if is_false(expire_now):
|
||||||
|
logger.debug('Api specified, now expire now')
|
||||||
|
elif token.is_reusable and settings.CONNECTION_TOKEN_REUSABLE:
|
||||||
|
logger.debug('Token is reusable, not expire now')
|
||||||
|
else:
|
||||||
token.expire()
|
token.expire()
|
||||||
|
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
@action(methods=['POST'], detail=False, url_path='applet-option')
|
@action(methods=['POST'], detail=False, url_path='applet-option')
|
||||||
|
|
|
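The exchange API above now resolves expiration in three steps: an explicit `expire_now` value from the caller wins, k8s assets default to not expiring, and otherwise the global `CONNECTION_TOKEN_REUSABLE` setting decides; even then a reusable token is kept alive when the global switch allows it. A minimal sketch of that decision, assuming `token` is a ConnectionToken and simplifying the real `is_false()` helper to a plain boolean check:

    from django.conf import settings

    def resolve_expire_now(request_data: dict, token) -> bool:
        # Explicit value from the API caller wins.
        expire_now = request_data.get('expire_now', None)
        if expire_now is None:
            if token.asset.type in ['k8s', 'kubernetes']:
                # Temporary special case: k8s tokens are not expired on exchange.
                expire_now = False
            else:
                expire_now = not settings.CONNECTION_TOKEN_REUSABLE
        if expire_now is False:
            return False
        if token.is_reusable and settings.CONNECTION_TOKEN_REUSABLE:
            # Reusable tokens survive the exchange when the global switch is on.
            return False
        return True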
@@ -104,7 +104,10 @@ class OAuth2Backend(JMSModelBackend):
        headers = {
            'Accept': 'application/json'
        }
-       access_token_response = requests_func(access_token_url, headers=headers)
+       if token_method == 'post':
+           access_token_response = requests_func(access_token_url, headers=headers, json=query_dict)
+       else:
+           access_token_response = requests_func(access_token_url, headers=headers)
        try:
            access_token_response.raise_for_status()
            access_token_response_data = access_token_response.json()
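With this change the OAuth2 backend can exchange the authorization code with either HTTP verb, depending on how the provider expects the token request. A rough standalone equivalent using `requests` (in the backend itself `requests_func` is already bound to the configured method and the GET parameters are assumed to be encoded in `access_token_url`):

    import requests

    def fetch_access_token(token_method: str, access_token_url: str, query_dict: dict) -> dict:
        headers = {'Accept': 'application/json'}
        if token_method == 'post':
            # Providers that want the code exchange as a POST receive it as a JSON body.
            response = requests.post(access_token_url, headers=headers, json=query_dict)
        else:
            # Otherwise the parameters are assumed to already be part of the URL.
            response = requests.get(access_token_url, headers=headers)
        response.raise_for_status()
        return response.json()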
@@ -1,17 +1,16 @@
 import base64
-import time

+from django.conf import settings
+from django.contrib.auth import logout as auth_logout
+from django.http import HttpResponse
 from django.shortcuts import redirect, reverse, render
 from django.utils.deprecation import MiddlewareMixin
-from django.http import HttpResponse
-from django.conf import settings
 from django.utils.translation import ugettext as _
-from django.contrib.auth import logout as auth_logout

 from apps.authentication import mixins
+from authentication.signals import post_auth_failed
 from common.utils import gen_key_pair
 from common.utils import get_request_ip
-from .signals import post_auth_failed


 class MFAMiddleware:

@@ -76,12 +75,18 @@ class ThirdPartyLoginMiddleware(mixins.AuthMixin):
         ip = get_request_ip(request)
         try:
             self.request = request
+            self._check_third_party_login_acl()
             self._check_login_acl(request.user, ip)
         except Exception as e:
-            post_auth_failed.send(
-                sender=self.__class__, username=request.user.username,
-                request=self.request, reason=e.msg
-            )
+            if getattr(request, 'user_need_delete', False):
+                request.user.delete()
+            else:
+                error_message = getattr(e, 'msg', None)
+                error_message = error_message or str(e)
+                post_auth_failed.send(
+                    sender=self.__class__, username=request.user.username,
+                    request=self.request, reason=error_message
+                )
             auth_logout(request)
             context = {
                 'title': _('Authentication failed'),
@ -0,0 +1,18 @@
|
||||||
|
# Generated by Django 3.2.17 on 2023-05-08 07:34
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('authentication', '0018_alter_connectiontoken_input_secret'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='connectiontoken',
|
||||||
|
name='is_reusable',
|
||||||
|
field=models.BooleanField(default=False, verbose_name='Reusable'),
|
||||||
|
),
|
||||||
|
]
|
|
@@ -54,6 +54,7 @@ def authenticate(request=None, **credentials):
     """
     username = credentials.get('username')

+    temp_user = None
     for backend, backend_path in _get_backends(return_tuples=True):
         # Check whether the username is allowed to authenticate (pre-check, to avoid wasting authentication time)
         logger.info('Try using auth backend: {}'.format(str(backend)))

@@ -77,11 +78,19 @@ def authenticate(request=None, **credentials):

         # Check whether the user is allowed to authenticate
         if not backend.user_allow_authenticate(user):
+            temp_user = user
+            temp_user.backend = backend_path
             continue

         # Annotate the user object with the path of the backend.
         user.backend = backend_path
         return user
+    else:
+        if temp_user is not None:
+            source_display = temp_user.source_display
+            request.error_message = _('''The administrator has enabled 'Only allow login from user source'.
+               The current user source is {}. Please contact the administrator.''').format(source_display)
+            return temp_user

     # The credentials supplied are invalid to all backends, fire signal
     user_login_failed.send(sender=__name__, credentials=_clean_credentials(credentials), request=request)

@@ -212,7 +221,8 @@ class MFAMixin:
         self._do_check_user_mfa(code, mfa_type, user=user)

     def check_user_mfa_if_need(self, user):
-        if self.request.session.get('auth_mfa'):
+        if self.request.session.get('auth_mfa') and \
+                self.request.session.get('auth_mfa_username') == user.username:
             return
         if not user.mfa_enabled:
             return

@@ -220,15 +230,16 @@ class MFAMixin:
         active_mfa_names = user.active_mfa_backends_mapper.keys()
         raise errors.MFARequiredError(mfa_types=tuple(active_mfa_names))

-    def mark_mfa_ok(self, mfa_type):
+    def mark_mfa_ok(self, mfa_type, user):
         self.request.session['auth_mfa'] = 1
+        self.request.session['auth_mfa_username'] = user.username
         self.request.session['auth_mfa_time'] = time.time()
         self.request.session['auth_mfa_required'] = 0
         self.request.session['auth_mfa_type'] = mfa_type
-        MFABlockUtils(self.request.user.username, self.get_request_ip()).clean_failed_count()
+        MFABlockUtils(user.username, self.get_request_ip()).clean_failed_count()

     def clean_mfa_mark(self):
-        keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type']
+        keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type', 'auth_mfa_username']
         for k in keys:
             self.request.session.pop(k, '')

@@ -263,7 +274,7 @@ class MFAMixin:
         ok, msg = mfa_backend.check_code(code)

         if ok:
-            self.mark_mfa_ok(mfa_type)
+            self.mark_mfa_ok(mfa_type, user)
             return

         raise errors.MFAFailedError(

@@ -345,6 +356,13 @@ class AuthACLMixin:
         self.request.session['auth_acl_id'] = str(acl.id)
         return

+    def _check_third_party_login_acl(self):
+        request = self.request
+        error_message = getattr(request, 'error_message', None)
+        if not error_message:
+            return
+        raise ValueError(error_message)
+
     def check_user_login_confirm_if_need(self, user):
         if not self.request.session.get("auth_confirm_required"):
             return
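The MFA mark in the session is now tied to a username, so a verification completed by one account can no longer satisfy a later check for a different account in the same session. A compact illustration of the check, assuming a dict-like `session`:

    def mfa_already_ok(session: dict, username: str) -> bool:
        # Both the flag and the recorded username must match the user being checked.
        return bool(session.get('auth_mfa')) and session.get('auth_mfa_username') == username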
@@ -40,6 +40,7 @@ class ConnectionToken(JMSOrgBaseModel):
     connect_method = models.CharField(max_length=32, verbose_name=_("Connect method"))
     user_display = models.CharField(max_length=128, default='', verbose_name=_("User display"))
     asset_display = models.CharField(max_length=128, default='', verbose_name=_("Asset display"))
+    is_reusable = models.BooleanField(default=False, verbose_name=_("Reusable"))
     date_expired = models.DateTimeField(default=date_expired_default, verbose_name=_("Date expired"))
     from_ticket = models.OneToOneField(
         'tickets.ApplyLoginAssetTicket', related_name='connection_token',

@@ -74,7 +75,7 @@ class ConnectionToken(JMSOrgBaseModel):

     def expire(self):
         self.date_expired = timezone.now()
-        self.save()
+        self.save(update_fields=['date_expired'])

     def renewal(self):
         """ Renew the token; once user-defined token creation is supported, the renewal policy will need to change """

@@ -108,9 +109,8 @@ class ConnectionToken(JMSOrgBaseModel):
             error = _('No user or invalid user')
             raise PermissionDenied(error)
         if not self.asset or not self.asset.is_active:
-            is_valid = False
             error = _('No asset or inactive asset')
-            return is_valid, error
+            raise PermissionDenied(error)
         if not self.account:
             error = _('No account')
             raise PermissionDenied(error)

@@ -160,6 +160,7 @@ class ConnectionToken(JMSOrgBaseModel):
            'remoteapplicationname:s': app,
            'alternate shell:s': app,
            'remoteapplicationcmdline:s': cmdline_b64,
+           'disableconnectionsharing:i': '1',
        }
        return options

@@ -172,7 +173,7 @@ class ConnectionToken(JMSOrgBaseModel):
        if not applet:
            return None

-       host_account = applet.select_host_account()
+       host_account = applet.select_host_account(self.user)
        if not host_account:
            raise JMSException({'error': 'No host account available'})
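After this change `is_valid()` signals every failure by raising `PermissionDenied`, instead of sometimes returning a tuple for an inactive asset, so callers only need one error path. A rough sketch of the resulting calling pattern, with `token` assumed to be a ConnectionToken:

    from rest_framework.exceptions import PermissionDenied

    def check_token(token):
        try:
            token.is_valid()
        except PermissionDenied as e:
            # Missing user, inactive asset, missing account, expired token, etc. all land here now.
            return False, str(e)
        return True, ''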
@@ -1,13 +1,16 @@
+from django.conf import settings
+from django.utils import timezone
 from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers

-from perms.serializers.permission import ActionChoicesField
-from orgs.mixins.serializers import OrgResourceModelSerializerMixin
 from common.serializers.fields import EncryptedField
+from orgs.mixins.serializers import OrgResourceModelSerializerMixin
+from perms.serializers.permission import ActionChoicesField
 from ..models import ConnectionToken

 __all__ = [
     'ConnectionTokenSerializer', 'SuperConnectionTokenSerializer',
+    'ConnectionTokenUpdateSerializer',
 ]

@@ -25,13 +28,13 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
         fields_small = fields_mini + [
             'user', 'asset', 'account', 'input_username',
             'input_secret', 'connect_method', 'protocol', 'actions',
-            'is_active', 'from_ticket', 'from_ticket_info',
+            'is_active', 'is_reusable', 'from_ticket', 'from_ticket_info',
             'date_expired', 'date_created', 'date_updated', 'created_by',
             'updated_by', 'org_id', 'org_name',
         ]
         read_only_fields = [
             # Ordinary tokens do not support specifying a user
-            'user', 'expire_time', 'is_expired',
+            'user', 'expire_time', 'is_expired', 'date_expired',
             'user_display', 'asset_display',
         ]
         fields = fields_small + read_only_fields

@@ -57,6 +60,32 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
         return info


+class ConnectionTokenUpdateSerializer(ConnectionTokenSerializer):
+    class Meta(ConnectionTokenSerializer.Meta):
+        can_update_fields = ['is_reusable']
+        read_only_fields = list(set(ConnectionTokenSerializer.Meta.fields) - set(can_update_fields))
+
+    def _get_date_expired(self):
+        delta = self.instance.date_expired - self.instance.date_created
+        if delta.total_seconds() > 3600 * 24:
+            return self.instance.date_expired
+
+        seconds = settings.CONNECTION_TOKEN_EXPIRATION_MAX
+        return timezone.now() + timezone.timedelta(seconds=seconds)
+
+    @staticmethod
+    def validate_is_reusable(value):
+        if value and not settings.CONNECTION_TOKEN_REUSABLE:
+            raise serializers.ValidationError(_('Reusable connection token is not allowed, global setting not enabled'))
+        return value
+
+    def validate(self, attrs):
+        reusable = attrs.get('is_reusable', False)
+        if reusable:
+            attrs['date_expired'] = self._get_date_expired()
+        return attrs
+
+
 class SuperConnectionTokenSerializer(ConnectionTokenSerializer):
     class Meta(ConnectionTokenSerializer.Meta):
         read_only_fields = list(set(ConnectionTokenSerializer.Meta.read_only_fields) - {'user'})
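Only `is_reusable` is writable through the new update serializer, and enabling it stretches `date_expired` up to `CONNECTION_TOKEN_EXPIRATION_MAX`. A hypothetical client call against the connection-token endpoint (the URL path, host and ids here are assumptions for illustration):

    import requests

    # Assumed endpoint and token id; adjust to the real deployment.
    url = 'https://jumpserver.example.com/api/v1/authentication/connection-token/<token-id>/'
    headers = {'Authorization': 'Bearer <api-token>', 'Content-Type': 'application/json'}

    # Allowed only when CONNECTION_TOKEN_REUSABLE is enabled globally,
    # otherwise the serializer rejects it with a validation error.
    resp = requests.patch(url, headers=headers, json={'is_reusable': True})
    resp.raise_for_status()
    print(resp.json()['date_expired'])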
@@ -0,0 +1,98 @@
+from functools import lru_cache
+
+from rest_framework.request import Request
+from django.utils.translation import ugettext_lazy as _
+from django.utils.module_loading import import_string
+from django.conf import settings
+from django.db.utils import IntegrityError
+from django.views import View
+
+from authentication import errors
+from authentication.mixins import AuthMixin
+from users.models import User
+from common.utils.django import reverse, get_object_or_none
+from common.utils import get_logger
+
+from .mixins import FlashMessageMixin
+
+
+logger = get_logger(__file__)
+
+
+class BaseLoginCallbackView(AuthMixin, FlashMessageMixin, View):
+
+    client_type_path = ''
+    client_auth_params = {}
+    user_type = ''
+    auth_backend = None
+    # Prompt messages
+    msg_client_err = _('Error')
+    msg_user_not_bound_err = _('Error')
+    msg_not_found_user_from_client_err = _('Error')
+
+    def verify_state(self):
+        raise NotImplementedError
+
+    def get_verify_state_failed_response(self, redirect_uri):
+        raise NotImplementedError
+
+    @property
+    @lru_cache(maxsize=1)
+    def client(self):
+        if not all([self.client_type_path, self.client_auth_params]):
+            raise NotImplementedError
+        client_init = {k: getattr(settings, v) for k, v in self.client_auth_params.items()}
+        client_type = import_string(self.client_type_path)
+        return client_type(**client_init)
+
+    def create_user_if_not_exist(self, user_id, **kwargs):
+        user = None
+        user_attr = self.client.get_user_detail(user_id, **kwargs)
+        try:
+            user, create = User.objects.get_or_create(
+                username=user_attr['username'], defaults=user_attr
+            )
+            setattr(user, f'{self.user_type}_id', user_id)
+            if create:
+                setattr(user, 'source', self.user_type)
+            user.save()
+        except IntegrityError as err:
+            logger.error(f'{self.msg_client_err}: create user error: {err}')
+
+        if user is None:
+            title = self.msg_client_err
+            msg = _('If you have any question, please contact the administrator')
+            return user, (title, msg)
+
+        return user, None
+
+    def get(self, request: Request):
+        code = request.GET.get('code')
+        redirect_url = request.GET.get('redirect_url')
+        login_url = reverse('authentication:login')
+
+        if not self.verify_state():
+            return self.get_verify_state_failed_response(redirect_url)
+
+        user_id, other_info = self.client.get_user_id_by_code(code)
+        if not user_id:
+            # This error does not occur in the normal flow; it indicates tampering
+            err = self.msg_not_found_user_from_client_err
+            response = self.get_failed_response(login_url, title=err, msg=err)
+            return response
+
+        user = get_object_or_none(User, **{f'{self.user_type}_id': user_id})
+        if user is None:
+            user, err = self.create_user_if_not_exist(user_id, other_info=other_info)
+            if err is not None:
+                response = self.get_failed_response(login_url, title=err[0], msg=err[1])
+                return response
+
+        try:
+            self.check_oauth2_auth(user, getattr(settings, self.auth_backend))
+        except errors.AuthFailedError as e:
+            self.set_login_failed_mark()
+            msg = e.msg
+            response = self.get_failed_response(login_url, title=msg, msg=msg)
+            return response
+        return self.redirect_to_guard_view()
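The new base view folds the WeCom, DingTalk and FeiShu login callbacks into one flow: a concrete callback only declares which SDK client to build, which User attribute stores the external id, and which messages and auth backend to use. A minimal hypothetical subclass mirroring the real ones further down in this diff (every "Example" name and the ExampleQRMixin are placeholders, not existing code):

    from django.utils.translation import ugettext_lazy as _

    class ExampleLoginCallbackView(ExampleQRMixin, BaseLoginCallbackView):  # hypothetical
        # Dotted path of the IM SDK client and the settings keys used to build it.
        client_type_path = 'common.sdk.im.example.Example'
        client_auth_params = {'app_id': 'EXAMPLE_APP_ID', 'app_secret': 'EXAMPLE_APP_SECRET'}
        # Attribute suffix on User (example_id) and the auth backend setting name.
        user_type = 'example'
        auth_backend = 'AUTH_BACKEND_EXAMPLE'

        msg_client_err = _('Example Error')
        msg_user_not_bound_err = _('Example is not bound')
        msg_not_found_user_from_client_err = _('Failed to get user from Example')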
@@ -16,21 +16,22 @@ from authentication.notifications import OAuthBindMessage
 from common.views.mixins import PermissionsMixin, UserConfirmRequiredExceptionMixin
 from common.permissions import UserConfirmation
 from common.sdk.im.dingtalk import URL, DingTalk
-from common.utils import FlashMessageUtil, get_logger
+from common.utils import get_logger
 from common.utils.common import get_request_ip
 from common.utils.django import get_object_or_none, reverse
 from common.utils.random import random_string
 from users.models import User
 from users.views import UserVerifyPasswordView

-from .mixins import METAMixin
+from .base import BaseLoginCallbackView
+from .mixins import METAMixin, FlashMessageMixin

 logger = get_logger(__file__)

 DINGTALK_STATE_SESSION_KEY = '_dingtalk_state'


-class DingTalkBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, View):
+class DingTalkBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, FlashMessageMixin, View):
     def dispatch(self, request, *args, **kwargs):
         try:
             return super().dispatch(request, *args, **kwargs)

@@ -56,26 +57,6 @@ class DingTalkBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, Vie
         msg = _("The system configuration is incorrect. Please contact your administrator")
         return self.get_failed_response(redirect_uri, msg, msg)

-    @staticmethod
-    def get_success_response(redirect_url, title, msg):
-        message_data = {
-            'title': title,
-            'message': msg,
-            'interval': 5,
-            'redirect_url': redirect_url,
-        }
-        return FlashMessageUtil.gen_and_redirect_to(message_data)
-
-    @staticmethod
-    def get_failed_response(redirect_url, title, msg):
-        message_data = {
-            'title': title,
-            'error': msg,
-            'interval': 5,
-            'redirect_url': redirect_url,
-        }
-        return FlashMessageUtil.gen_and_redirect_to(message_data)
-
     def get_already_bound_response(self, redirect_url):
         msg = _('DingTalk is already bound')
         response = self.get_failed_response(redirect_url, msg, msg)

@@ -158,7 +139,7 @@ class DingTalkQRBindCallbackView(DingTalkQRMixin, View):
             appsecret=settings.DINGTALK_APPSECRET,
             agentid=settings.DINGTALK_AGENTID
         )
-        userid = dingtalk.get_userid_by_code(code)
+        userid, __ = dingtalk.get_user_id_by_code(code)

         if not userid:
             msg = _('DingTalk query user failed')

@@ -214,45 +195,20 @@ class DingTalkQRLoginView(DingTalkQRMixin, METAMixin, View):
         return HttpResponseRedirect(url)


-class DingTalkQRLoginCallbackView(AuthMixin, DingTalkQRMixin, View):
+class DingTalkQRLoginCallbackView(DingTalkQRMixin, BaseLoginCallbackView):
     permission_classes = (AllowAny,)

-    def get(self, request: HttpRequest):
-        code = request.GET.get('code')
-        redirect_url = request.GET.get('redirect_url')
-        login_url = reverse('authentication:login')
-
-        if not self.verify_state():
-            return self.get_verify_state_failed_response(redirect_url)
-
-        dingtalk = DingTalk(
-            appid=settings.DINGTALK_APPKEY,
-            appsecret=settings.DINGTALK_APPSECRET,
-            agentid=settings.DINGTALK_AGENTID
-        )
-        userid = dingtalk.get_userid_by_code(code)
-        if not userid:
-            # This error does not occur in the normal flow; it indicates tampering
-            msg = _('Failed to get user from DingTalk')
-            response = self.get_failed_response(login_url, title=msg, msg=msg)
-            return response
-
-        user = get_object_or_none(User, dingtalk_id=userid)
-        if user is None:
-            title = _('DingTalk is not bound')
-            msg = _('Please login with a password and then bind the DingTalk')
-            response = self.get_failed_response(login_url, title=title, msg=msg)
-            return response
-
-        try:
-            self.check_oauth2_auth(user, settings.AUTH_BACKEND_DINGTALK)
-        except errors.AuthFailedError as e:
-            self.set_login_failed_mark()
-            msg = e.msg
-            response = self.get_failed_response(login_url, title=msg, msg=msg)
-            return response
-
-        return self.redirect_to_guard_view()
+    client_type_path = 'common.sdk.im.dingtalk.DingTalk'
+    client_auth_params = {
+        'appid': 'DINGTALK_APPKEY', 'appsecret': 'DINGTALK_APPSECRET',
+        'agentid': 'DINGTALK_AGENTID'
+    }
+    user_type = 'dingtalk'
+    auth_backend = 'AUTH_BACKEND_DINGTALK'
+
+    msg_client_err = _('DingTalk Error')
+    msg_user_not_bound_err = _('DingTalk is not bound')
+    msg_not_found_user_from_client_err = _('Failed to get user from DingTalk')


 class DingTalkOAuthLoginView(DingTalkOAuthMixin, View):

@@ -284,7 +240,7 @@ class DingTalkOAuthLoginCallbackView(AuthMixin, DingTalkOAuthMixin, View):
             appsecret=settings.DINGTALK_APPSECRET,
             agentid=settings.DINGTALK_AGENTID
         )
-        userid = dingtalk.get_userid_by_code(code)
+        userid, __ = dingtalk.get_user_id_by_code(code)
         if not userid:
             # This error does not occur in the normal flow; it indicates tampering
             msg = _('Failed to get user from DingTalk')
@@ -9,26 +9,26 @@ from django.views import View
 from rest_framework.exceptions import APIException
 from rest_framework.permissions import AllowAny, IsAuthenticated

-from authentication import errors
 from authentication.const import ConfirmType
-from authentication.mixins import AuthMixin
 from authentication.notifications import OAuthBindMessage
 from common.views.mixins import PermissionsMixin, UserConfirmRequiredExceptionMixin
 from common.permissions import UserConfirmation
 from common.sdk.im.feishu import URL, FeiShu
-from common.utils import FlashMessageUtil, get_logger
+from common.utils import get_logger
 from common.utils.common import get_request_ip
-from common.utils.django import get_object_or_none, reverse
+from common.utils.django import reverse
 from common.utils.random import random_string
-from users.models import User
 from users.views import UserVerifyPasswordView

+from .base import BaseLoginCallbackView
+from .mixins import FlashMessageMixin

 logger = get_logger(__file__)

 FEISHU_STATE_SESSION_KEY = '_feishu_state'


-class FeiShuQRMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, View):
+class FeiShuQRMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, FlashMessageMixin, View):
     def dispatch(self, request, *args, **kwargs):
         try:
             return super().dispatch(request, *args, **kwargs)

@@ -63,26 +63,6 @@ class FeiShuQRMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, View):
         url = URL().authen + '?' + urlencode(params)
         return url

-    @staticmethod
-    def get_success_response(redirect_url, title, msg):
-        message_data = {
-            'title': title,
-            'message': msg,
-            'interval': 5,
-            'redirect_url': redirect_url,
-        }
-        return FlashMessageUtil.gen_and_redirect_to(message_data)
-
-    @staticmethod
-    def get_failed_response(redirect_url, title, msg):
-        message_data = {
-            'title': title,
-            'error': msg,
-            'interval': 5,
-            'redirect_url': redirect_url,
-        }
-        return FlashMessageUtil.gen_and_redirect_to(message_data)
-
     def get_already_bound_response(self, redirect_url):
         msg = _('FeiShu is already bound')
         response = self.get_failed_response(redirect_url, msg, msg)

@@ -93,7 +73,6 @@ class FeiShuQRBindView(FeiShuQRMixin, View):
     permission_classes = (IsAuthenticated, UserConfirmation.require(ConfirmType.ReLogin))

     def get(self, request: HttpRequest):
-        user = request.user
         redirect_url = request.GET.get('redirect_url')

         redirect_uri = reverse('authentication:feishu-qr-bind-callback', external=True)

@@ -123,7 +102,7 @@ class FeiShuQRBindCallbackView(FeiShuQRMixin, View):
             app_id=settings.FEISHU_APP_ID,
             app_secret=settings.FEISHU_APP_SECRET
         )
-        user_id = feishu.get_user_id_by_code(code)
+        user_id, __ = feishu.get_user_id_by_code(code)

         if not user_id:
             msg = _('FeiShu query user failed')

@@ -176,41 +155,15 @@ class FeiShuQRLoginView(FeiShuQRMixin, View):
         return HttpResponseRedirect(url)


-class FeiShuQRLoginCallbackView(AuthMixin, FeiShuQRMixin, View):
+class FeiShuQRLoginCallbackView(FeiShuQRMixin, BaseLoginCallbackView):
     permission_classes = (AllowAny,)

-    def get(self, request: HttpRequest):
-        code = request.GET.get('code')
-        redirect_url = request.GET.get('redirect_url')
-        login_url = reverse('authentication:login')
-
-        if not self.verify_state():
-            return self.get_verify_state_failed_response(redirect_url)
-
-        feishu = FeiShu(
-            app_id=settings.FEISHU_APP_ID,
-            app_secret=settings.FEISHU_APP_SECRET
-        )
-        user_id = feishu.get_user_id_by_code(code)
-        if not user_id:
-            # This error does not occur in the normal flow; it indicates tampering
-            msg = _('Failed to get user from FeiShu')
-            response = self.get_failed_response(login_url, title=msg, msg=msg)
-            return response
-
-        user = get_object_or_none(User, feishu_id=user_id)
-        if user is None:
-            title = _('FeiShu is not bound')
-            msg = _('Please login with a password and then bind the FeiShu')
-            response = self.get_failed_response(login_url, title=title, msg=msg)
-            return response
-
-        try:
-            self.check_oauth2_auth(user, settings.AUTH_BACKEND_FEISHU)
-        except errors.AuthFailedError as e:
-            self.set_login_failed_mark()
-            msg = e.msg
-            response = self.get_failed_response(login_url, title=msg, msg=msg)
-            return response
-
-        return self.redirect_to_guard_view()
+    client_type_path = 'common.sdk.im.feishu.FeiShu'
+    client_auth_params = {'app_id': 'FEISHU_APP_ID', 'app_secret': 'FEISHU_APP_SECRET'}
+    user_type = 'feishu'
+    auth_backend = 'AUTH_BACKEND_FEISHU'
+
+    msg_client_err = _('FeiShu Error')
+    msg_user_not_bound_err = _('FeiShu is not bound')
+    msg_not_found_user_from_client_err = _('Failed to get user from FeiShu')
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 #
+from common.utils import FlashMessageUtil
+

 class METAMixin:
     def get_next_url_from_meta(self):

@@ -10,3 +12,16 @@ class METAMixin:
         if len(next_url_item) > 1:
             next_url = next_url_item[-1]
         return next_url
+
+
+class FlashMessageMixin:
+    @staticmethod
+    def get_response(redirect_url, title, msg, m_type='message'):
+        message_data = {'title': title, 'interval': 5, 'redirect_url': redirect_url, m_type: msg}
+        return FlashMessageUtil.gen_and_redirect_to(message_data)
+
+    def get_success_response(self, redirect_url, title, msg):
+        return self.get_response(redirect_url, title, msg)
+
+    def get_failed_response(self, redirect_url, title, msg):
+        return self.get_response(redirect_url, title, msg, 'error')
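The identical `get_success_response` / `get_failed_response` staticmethods that were duplicated across the WeCom, DingTalk and FeiShu views collapse into this mixin: one `get_response` builds the flash-message payload and the two wrappers only switch the payload key between `message` and `error`. A sketch of a view built on it (the view class and redirect URL are illustrative placeholders):

    from django.views import View

    class ExampleBindView(FlashMessageMixin, View):  # hypothetical view for illustration
        def get(self, request):
            # Renders a flash page for 5 seconds, then redirects to the given URL (placeholder path).
            return self.get_failed_response('/login/', title='Bind failed', msg='Already bound')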
@@ -10,7 +10,7 @@ from rest_framework.exceptions import APIException

 from users.models import User
 from users.views import UserVerifyPasswordView
-from common.utils import get_logger, FlashMessageUtil
+from common.utils import get_logger
 from common.utils.random import random_string
 from common.utils.django import reverse, get_object_or_none
 from common.sdk.im.wecom import URL

@@ -22,14 +22,16 @@ from authentication import errors
 from authentication.mixins import AuthMixin
 from authentication.const import ConfirmType
 from authentication.notifications import OAuthBindMessage
-from .mixins import METAMixin
+from .base import BaseLoginCallbackView
+from .mixins import METAMixin, FlashMessageMixin

 logger = get_logger(__file__)

 WECOM_STATE_SESSION_KEY = '_wecom_state'


-class WeComBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, View):
+class WeComBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, FlashMessageMixin, View):
     def dispatch(self, request, *args, **kwargs):
         try:
             return super().dispatch(request, *args, **kwargs)

@@ -55,26 +57,6 @@ class WeComBaseMixin(UserConfirmRequiredExceptionMixin, PermissionsMixin, View):
         msg = _("The system configuration is incorrect. Please contact your administrator")
         return self.get_failed_response(redirect_uri, msg, msg)

-    @staticmethod
-    def get_success_response(redirect_url, title, msg):
-        message_data = {
-            'title': title,
-            'message': msg,
-            'interval': 5,
-            'redirect_url': redirect_url,
-        }
-        return FlashMessageUtil.gen_and_redirect_to(message_data)
-
-    @staticmethod
-    def get_failed_response(redirect_url, title, msg):
-        message_data = {
-            'title': title,
-            'error': msg,
-            'interval': 5,
-            'redirect_url': redirect_url,
-        }
-        return FlashMessageUtil.gen_and_redirect_to(message_data)
-
     def get_already_bound_response(self, redirect_url):
         msg = _('WeCom is already bound')
         response = self.get_failed_response(redirect_url, msg, msg)

@@ -208,45 +190,17 @@ class WeComQRLoginView(WeComQRMixin, METAMixin, View):
         return HttpResponseRedirect(url)


-class WeComQRLoginCallbackView(AuthMixin, WeComQRMixin, View):
+class WeComQRLoginCallbackView(WeComQRMixin, BaseLoginCallbackView):
     permission_classes = (AllowAny,)

-    def get(self, request: HttpRequest):
-        code = request.GET.get('code')
-        redirect_url = request.GET.get('redirect_url')
-        login_url = reverse('authentication:login')
-
-        if not self.verify_state():
-            return self.get_verify_state_failed_response(redirect_url)
-
-        wecom = WeCom(
-            corpid=settings.WECOM_CORPID,
-            corpsecret=settings.WECOM_SECRET,
-            agentid=settings.WECOM_AGENTID
-        )
-        wecom_userid, __ = wecom.get_user_id_by_code(code)
-        if not wecom_userid:
-            # This error does not occur in the normal flow; it indicates tampering
-            msg = _('Failed to get user from WeCom')
-            response = self.get_failed_response(login_url, title=msg, msg=msg)
-            return response
-
-        user = get_object_or_none(User, wecom_id=wecom_userid)
-        if user is None:
-            title = _('WeCom is not bound')
-            msg = _('Please login with a password and then bind the WeCom')
-            response = self.get_failed_response(login_url, title=title, msg=msg)
-            return response
-
-        try:
-            self.check_oauth2_auth(user, settings.AUTH_BACKEND_WECOM)
-        except errors.AuthFailedError as e:
-            self.set_login_failed_mark()
-            msg = e.msg
-            response = self.get_failed_response(login_url, title=msg, msg=msg)
-            return response
-
-        return self.redirect_to_guard_view()
+    client_type_path = 'common.sdk.im.wecom.WeCom'
+    client_auth_params = {'corpid': 'WECOM_CORPID', 'corpsecret': 'WECOM_SECRET', 'agentid': 'WECOM_AGENTID'}
+    user_type = 'wecom'
+    auth_backend = 'AUTH_BACKEND_WECOM'
+
+    msg_client_err = _('WeCom Error')
+    msg_user_not_bound_err = _('WeCom is not bound')
+    msg_not_found_user_from_client_err = _('Failed to get user from WeCom')


 class WeComOAuthLoginView(WeComOAuthMixin, View):
@@ -1,6 +1,10 @@
 import abc
+import io
+import re
 from datetime import datetime

+import pyzipper
+from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers
 from rest_framework.renderers import BaseRenderer
 from rest_framework.utils import encoders, json

@@ -181,8 +185,35 @@ class BaseFileRenderer(BaseRenderer):
             self.write_rows(rows)
             self.after_render()
             value = self.get_rendered_value()
+            if getattr(view, 'export_as_zip', False) and self.template == 'export':
+                value = self.compress_into_zip_file(value, request, response)
         except Exception as e:
             logger.debug(e, exc_info=True)
             value = 'Render error! ({})'.format(self.media_type).encode('utf-8')
             return value
         return value
+
+    def compress_into_zip_file(self, value, request, response):
+        filename_pattern = re.compile(r'filename="([^"]+)"')
+        content_disposition = response['Content-Disposition']
+        match = filename_pattern.search(content_disposition)
+        filename = match.group(1)
+        response['Content-Disposition'] = content_disposition.replace(self.format, 'zip')
+
+        contents_io = io.BytesIO()
+        secret_key = request.user.secret_key
+        if not secret_key:
+            content = _("{} - The encryption password has not been set - "
+                        "please go to personal information -> file encryption password "
+                        "to set the encryption password").format(request.user.name)
+
+            response['Content-Disposition'] = content_disposition.replace(self.format, 'txt')
+            contents_io.write(content.encode('utf-8'))
+            return contents_io.getvalue()
+
+        with pyzipper.AESZipFile(
+                contents_io, 'w', compression=pyzipper.ZIP_LZMA, encryption=pyzipper.WZ_AES
+        ) as zf:
+            zf.setpassword(secret_key.encode('utf8'))
+            zf.writestr(filename, value)
+        return contents_io.getvalue()
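When `export_as_zip` is set on the view and the user has a file-encryption password, the rendered export is wrapped in an AES-encrypted zip via pyzipper. The core of that packaging, stripped of the response handling, looks roughly like this:

    import io
    import pyzipper

    def encrypt_export(filename: str, payload: bytes, secret_key: str) -> bytes:
        buf = io.BytesIO()
        # AES-encrypted zip; readable by tools that support WinZip AES encryption.
        with pyzipper.AESZipFile(buf, 'w', compression=pyzipper.ZIP_LZMA,
                                 encryption=pyzipper.WZ_AES) as zf:
            zf.setpassword(secret_key.encode('utf8'))
            zf.writestr(filename, payload)
        return buf.getvalue()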
@@ -11,7 +11,7 @@ class FlowerService(BaseService):

     @property
     def db_file(self):
-        return os.path.join(BASE_DIR, 'data', 'flower')
+        return os.path.join(BASE_DIR, 'data', 'flower.db')

     @property
     def cmd(self):
@@ -5,6 +5,7 @@ import base64
 from common.utils import get_logger
 from common.sdk.im.utils import digest, as_request
 from common.sdk.im.mixin import BaseRequest
+from users.utils import construct_user_email

 logger = get_logger(__file__)

@@ -35,6 +36,7 @@ class URL:
     SEND_MESSAGE = 'https://oapi.dingtalk.com/topapi/message/corpconversation/asyncsend_v2'
     GET_SEND_MSG_PROGRESS = 'https://oapi.dingtalk.com/topapi/message/corpconversation/getsendprogress'
     GET_USERID_BY_UNIONID = 'https://oapi.dingtalk.com/topapi/user/getbyunionid'
+    GET_USER_INFO_BY_USER_ID = 'https://oapi.dingtalk.com/topapi/v2/user/get'


 class DingTalkRequests(BaseRequest):

@@ -129,11 +131,11 @@ class DingTalk:
         data = self._request.post(URL.GET_USER_INFO_BY_CODE, json=body, with_sign=True)
         return data['user_info']

-    def get_userid_by_code(self, code):
+    def get_user_id_by_code(self, code):
         user_info = self.get_userinfo_bycode(code)
         unionid = user_info['unionid']
         userid = self.get_userid_by_unionid(unionid)
-        return userid
+        return userid, None

     def get_userid_by_unionid(self, unionid):
         body = {

@@ -195,3 +197,18 @@ class DingTalk:

         data = self._request.post(URL.GET_SEND_MSG_PROGRESS, json=body, with_token=True)
         return data
+
+    def get_user_detail(self, user_id, **kwargs):
+        # https://open.dingtalk.com/document/orgapp/query-user-details
+        body = {'userid': user_id}
+        data = self._request.post(
+            URL.GET_USER_INFO_BY_USER_ID, json=body, with_token=True
+        )
+        data = data['result']
+        username = user_id
+        name = data.get('name', username)
+        email = data.get('email') or data.get('org_email')
+        email = construct_user_email(username, email)
+        return {
+            'username': username, 'name': name, 'email': email
+        }
@@ -1,9 +1,9 @@
 import json

-from django.utils.translation import ugettext_lazy as _
 from rest_framework.exceptions import APIException

 from django.conf import settings
+from users.utils import construct_user_email
 from common.utils.common import get_logger
 from common.sdk.im.utils import digest
 from common.sdk.im.mixin import RequestMixin, BaseRequest

@@ -37,6 +37,9 @@ class URL:
     def send_message(self):
         return f'{self.host}/open-apis/im/v1/messages'

+    def get_user_detail(self, user_id):
+        return f'{self.host}/open-apis/contact/v3/users/{user_id}'
+

 class ErrorCode:
     INVALID_APP_ACCESS_TOKEN = 99991664

@@ -106,7 +109,7 @@ class FeiShu(RequestMixin):
         data = self._requests.post(URL().get_user_info_by_code, json=body, check_errcode_is_0=False)

         self._requests.check_errcode_is_0(data)
-        return data['data']['user_id']
+        return data['data']['user_id'], data['data']

     def send_text(self, user_ids, msg):
         params = {

@@ -130,3 +133,15 @@ class FeiShu(RequestMixin):
             logger.exception(e)
             invalid_users.append(user_id)
         return invalid_users
+
+    @staticmethod
+    def get_user_detail(user_id, **kwargs):
+        # get_user_id_by_code already returns the profile info, parse it directly here
+        data = kwargs['other_info']
+        username = user_id
+        name = data.get('name', username)
+        email = data.get('email') or data.get('enterprise_email')
+        email = construct_user_email(username, email)
+        return {
+            'username': username, 'name': name, 'email': email
+        }
@ -3,8 +3,9 @@ from typing import Iterable, AnyStr
|
||||||
from django.utils.translation import ugettext_lazy as _
|
from django.utils.translation import ugettext_lazy as _
|
||||||
from rest_framework.exceptions import APIException
|
from rest_framework.exceptions import APIException
|
||||||
|
|
||||||
|
from users.utils import construct_user_email
|
||||||
from common.utils.common import get_logger
|
from common.utils.common import get_logger
|
||||||
from common.sdk.im.utils import digest, DictWrapper, update_values, set_default
|
from common.sdk.im.utils import digest, update_values
|
||||||
from common.sdk.im.mixin import RequestMixin, BaseRequest
|
from common.sdk.im.mixin import RequestMixin, BaseRequest
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
logger = get_logger(__name__)
|
||||||
|
@ -151,10 +152,7 @@ class WeCom(RequestMixin):
|
||||||
|
|
||||||
def get_user_id_by_code(self, code):
|
def get_user_id_by_code(self, code):
|
||||||
# # https://open.work.weixin.qq.com/api/doc/90000/90135/91437
|
# # https://open.work.weixin.qq.com/api/doc/90000/90135/91437
|
||||||
|
params = {'code': code}
|
||||||
params = {
|
|
||||||
'code': code,
|
|
||||||
}
|
|
||||||
data = self._requests.get(URL.GET_USER_ID_BY_CODE, params=params, check_errcode_is_0=False)
|
data = self._requests.get(URL.GET_USER_ID_BY_CODE, params=params, check_errcode_is_0=False)
|
||||||
|
|
||||||
errcode = data['errcode']
|
errcode = data['errcode']
|
||||||
|
@ -175,12 +173,15 @@ class WeCom(RequestMixin):
|
||||||
logger.error(f'WeCom response 200 but get field from json error: fields=UserId|OpenId')
|
logger.error(f'WeCom response 200 but get field from json error: fields=UserId|OpenId')
|
||||||
raise WeComError
|
raise WeComError
|
||||||
|
|
||||||
def get_user_detail(self, id):
|
def get_user_detail(self, user_id, **kwargs):
|
||||||
# https://open.work.weixin.qq.com/api/doc/90000/90135/90196
|
# https://open.work.weixin.qq.com/api/doc/90000/90135/90196
|
||||||
|
params = {'userid': user_id}
|
||||||
params = {
|
data = self._requests.get(URL.GET_USER_DETAIL, params)
|
||||||
'userid': id,
|
username = data.get('userid')
|
||||||
|
name = data.get('name', username)
|
||||||
|
email = data.get('email') or data.get('biz_mail')
|
||||||
|
email = construct_user_email(username, email)
|
||||||
|
return {
|
||||||
|
'username': username, 'name': name, 'email': email
|
||||||
}
|
}
|
||||||
|
|
||||||
data = self._requests.get(URL.GET_USER_DETAIL, params)
|
|
||||||
return data
|
|
||||||
|
|
|
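Both SSO backends now resolve a user into the same `{'username', 'name', 'email'}` shape before the caller creates or matches a local user. A minimal standalone sketch of that normalization, with a stand-in for `construct_user_email` (the real helper lives in `users.utils` and may behave differently):

```python
def construct_user_email(username, email, default_suffix='@example.com'):
    # Stand-in: synthesize an address when the IM platform returns none.
    return email if email else f'{username}{default_suffix}'


def normalize_user_detail(user_id, info, email_keys=('email', 'enterprise_email')):
    # Mirror the pattern used by the FeiShu / WeCom get_user_detail methods:
    # username comes from the platform user id, name falls back to it, and the
    # first non-empty email-like field wins.
    email = next((info.get(k) for k in email_keys if info.get(k)), None)
    return {
        'username': user_id,
        'name': info.get('name', user_id),
        'email': construct_user_email(user_id, email),
    }


if __name__ == '__main__':
    print(normalize_user_detail('ou_123', {'name': 'Alice', 'enterprise_email': 'alice@corp.cn'}))
```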
@@ -175,6 +175,8 @@ def _parse_ssh_private_key(text, password=None):
         dsa.DSAPrivateKey,
         ed25519.Ed25519PrivateKey,
     """
+    if not bool(password):
+        password = None
     if isinstance(text, str):
         try:
             text = text.encode("utf-8")
@@ -45,3 +45,7 @@ def get_remote_addr(request):

 def is_true(value):
     return value in BooleanField.TRUE_VALUES
+
+
+def is_false(value):
+    return value in BooleanField.FALSE_VALUES
@@ -229,7 +229,9 @@ class Config(dict):
         'SESSION_COOKIE_AGE': 3600 * 24,
         'SESSION_EXPIRE_AT_BROWSER_CLOSE': False,
         'LOGIN_URL': reverse_lazy('authentication:login'),
-        'CONNECTION_TOKEN_EXPIRATION': 5 * 60,
+        'CONNECTION_TOKEN_EXPIRATION': 5 * 60,  # default
+        'CONNECTION_TOKEN_EXPIRATION_MAX': 60 * 60 * 24 * 30,  # maximum
+        'CONNECTION_TOKEN_REUSABLE': False,

         # Custom Config
         'AUTH_CUSTOM': False,
@@ -2,6 +2,7 @@

 path_perms_map = {
     'xpack': '*',
+    'settings': '*',
     'replay': 'default',
     'applets': 'terminal.view_applet',
     'playbooks': 'ops.view_playbook'
@@ -131,6 +131,9 @@ TICKETS_ENABLED = CONFIG.TICKETS_ENABLED
 REFERER_CHECK_ENABLED = CONFIG.REFERER_CHECK_ENABLED

 CONNECTION_TOKEN_ENABLED = CONFIG.CONNECTION_TOKEN_ENABLED
+CONNECTION_TOKEN_REUSABLE = CONFIG.CONNECTION_TOKEN_REUSABLE
+CONNECTION_TOKEN_EXPIRATION_MAX = CONFIG.CONNECTION_TOKEN_EXPIRATION_MAX

 FORGOT_PASSWORD_URL = CONFIG.FORGOT_PASSWORD_URL

 # Custom default organization name
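With a default lifetime (`CONNECTION_TOKEN_EXPIRATION`) and a ceiling (`CONNECTION_TOKEN_EXPIRATION_MAX`) both configurable, a caller-requested lifetime presumably has to be clamped between them. A hedged sketch of that clamping; the helper name and call pattern are hypothetical, only the two setting values come from the diff:

```python
DEFAULTS = {
    'CONNECTION_TOKEN_EXPIRATION': 5 * 60,                   # default lifetime, seconds
    'CONNECTION_TOKEN_EXPIRATION_MAX': 60 * 60 * 24 * 30,    # hard ceiling
}


def resolve_token_expiration(requested=None, config=DEFAULTS):
    # Hypothetical helper: fall back to the default, never exceed the maximum.
    default = config['CONNECTION_TOKEN_EXPIRATION']
    maximum = config['CONNECTION_TOKEN_EXPIRATION_MAX']
    if not requested:
        return default
    return max(1, min(int(requested), maximum))


assert resolve_token_expiration() == 300
assert resolve_token_expiration(10 ** 9) == 60 * 60 * 24 * 30
```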
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:24858bf247f7af58abda5adb5be733b7b995df2e26de7c91caf43f7aa0dd3be0
-size 139654
+oid sha256:523a93e9703e62c39440d2e172c96fea7d8d04965cab43095fc8a378d157bf59
+size 141798

File diff suppressed because it is too large

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0788b48bc50cffe3e7ff83803ef0edadfc120c9165bfe6bccd1f896d8bf39397
-size 114419
+oid sha256:bd60ca8b6c43b9b5940b14a8ca8073ae26062a5402f663ac39043cbc669199bd
+size 116040

File diff suppressed because it is too large
@@ -1,4 +1,5 @@
 from collections import defaultdict
+from functools import reduce


 class DefaultCallback:
@@ -18,6 +19,7 @@ class DefaultCallback:
             failures=defaultdict(dict),
             dark=defaultdict(dict),
             skipped=defaultdict(dict),
+            ignored=defaultdict(dict),
         )
         self.summary = dict(
             ok=[],
@@ -59,6 +61,14 @@ class DefaultCallback:
         }
         self.result['ok'][host][task] = detail

+    def runner_on_skipped(self, event_data, host=None, task=None, **kwargs):
+        detail = {
+            'action': event_data.get('task_action', ''),
+            'res': {},
+            'rc': 0,
+        }
+        self.result['skipped'][host][task] = detail
+
     def runner_on_failed(self, event_data, host=None, task=None, res=None, **kwargs):
         detail = {
             'action': event_data.get('task_action', ''),
@@ -67,15 +77,9 @@ class DefaultCallback:
             'stdout': res.get('stdout', ''),
             'stderr': ';'.join([res.get('stderr', ''), res.get('msg', '')]).strip(';')
         }
-        self.result['failures'][host][task] = detail
-
-    def runner_on_skipped(self, event_data, host=None, task=None, **kwargs):
-        detail = {
-            'action': event_data.get('task_action', ''),
-            'res': {},
-            'rc': 0,
-        }
-        self.result['skipped'][host][task] = detail
+        ignore_errors = event_data.get('ignore_errors', False)
+        error_key = 'ignored' if ignore_errors else 'failures'
+        self.result[error_key][host][task] = detail

     def runner_on_unreachable(self, event_data, host=None, task=None, res=None, **kwargs):
         detail = {
@@ -106,13 +110,18 @@ class DefaultCallback:

     def playbook_on_stats(self, event_data, **kwargs):
         failed = []
-        for i in ['dark', 'failures']:
-            for host, tasks in self.result[i].items():
+        error_func = lambda err, task_detail: err + f"{task_detail[0]}: {task_detail[1]['stderr']};"
+        for tp in ['dark', 'failures']:
+            for host, tasks in self.result[tp].items():
                 failed.append(host)
-                error = ''
-                for task, detail in tasks.items():
-                    error += f'{task}: {detail["stderr"]};'
-                self.summary[i][host] = error.strip(';')
+                error = reduce(error_func, tasks.items(), '').strip(';')
+                self.summary[tp][host] = error
+
+        for host, tasks in self.result.get('ignored', {}).items():
+            ignore_errors = reduce(error_func, tasks.items(), '').strip(';')
+            if host in failed:
+                self.summary['failures'][host] += {ignore_errors}
+
         self.summary['ok'] = list(set(self.result['ok'].keys()) - set(failed))
         self.summary['skipped'] = list(set(self.result['skipped'].keys()) - set(failed))
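The rewritten `playbook_on_stats` folds each host's failed tasks into one `task: stderr;` string with `functools.reduce`. The same folding in isolation, with made-up task details:

```python
from functools import reduce

# Same shape as the callback's error_func: accumulate "task: stderr;" fragments.
error_func = lambda err, task_detail: err + f"{task_detail[0]}: {task_detail[1]['stderr']};"

tasks = {
    'Gather facts': {'stderr': 'unreachable'},
    'Install agent': {'stderr': 'permission denied'},
}

summary = reduce(error_func, tasks.items(), '').strip(';')
print(summary)  # Gather facts: unreachable;Install agent: permission denied
```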
@@ -9,8 +9,11 @@ __all__ = ['JMSInventory']


 class JMSInventory:
-    def __init__(self, assets, account_policy='privileged_first',
-                 account_prefer='root,Administrator', host_callback=None, exclude_localhost=False):
+    def __init__(
+            self, assets, account_policy='privileged_first',
+            account_prefer='root,Administrator', host_callback=None,
+            exclude_localhost=False, task_type=None
+    ):
         """
         :param assets:
         :param account_prefer: account username name if not set use account_policy
@@ -22,6 +25,7 @@ class JMSInventory:
         self.host_callback = host_callback
         self.exclude_hosts = {}
         self.exclude_localhost = exclude_localhost
+        self.task_type = task_type

     @staticmethod
     def clean_assets(assets):
@@ -73,6 +77,7 @@ class JMSInventory:
         return var

     def make_account_vars(self, host, asset, account, automation, protocol, platform, gateway):
+        from accounts.const import AutomationTypes
         if not account:
             host['error'] = _("No account available")
             return host
@@ -92,6 +97,12 @@ class JMSInventory:
                 host['ansible_become_password'] = su_from.secret
             else:
                 host['ansible_become_password'] = account.secret
+        elif platform.su_enabled and not su_from and \
+                self.task_type in (AutomationTypes.change_secret, AutomationTypes.push_account):
+            host.update(self.make_account_ansible_vars(account))
+            host['ansible_become'] = True
+            host['ansible_become_user'] = 'root'
+            host['ansible_become_password'] = account.secret
         else:
             host.update(self.make_account_ansible_vars(account))
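For change-secret and push-account jobs on su-enabled platforms without an explicit `su_from`, the inventory now escalates to root with the account's own secret. A reduced sketch of that branch logic, using plain values instead of the ORM objects the real method receives (the string task types stand in for `accounts.const.AutomationTypes` members):

```python
def become_vars(su_enabled, su_from_secret, account_secret, task_type):
    # Simplified decision table mirroring the new make_account_vars branches.
    if su_enabled and su_from_secret:
        return {'ansible_become': True, 'ansible_become_password': su_from_secret}
    if su_enabled and task_type in ('change_secret', 'push_account'):
        return {
            'ansible_become': True,
            'ansible_become_user': 'root',
            'ansible_become_password': account_secret,
        }
    return {'ansible_become': False}


print(become_vars(True, None, 's3cret', 'push_account'))
```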
@@ -4,19 +4,19 @@ import os
 import re

 from celery.result import AsyncResult
-from rest_framework import generics, viewsets, mixins, status
 from django.shortcuts import get_object_or_404
 from django.utils.translation import ugettext as _
 from django_celery_beat.models import PeriodicTask
+from rest_framework import generics, viewsets, mixins, status
 from rest_framework.response import Response

+from common.api import LogTailApi, CommonApiMixin
 from common.exceptions import JMSException
 from common.permissions import IsValidUser
-from common.api import LogTailApi, CommonApiMixin
 from ops.celery import app
-from ..models import CeleryTaskExecution, CeleryTask
-from ..celery.utils import get_celery_task_log_path
 from ..ansible.utils import get_ansible_task_log_path
+from ..celery.utils import get_celery_task_log_path
+from ..models import CeleryTaskExecution, CeleryTask
 from ..serializers import CeleryResultSerializer, CeleryPeriodTaskSerializer
 from ..serializers.celery import CeleryTaskSerializer, CeleryTaskExecutionSerializer
@@ -1,23 +1,28 @@
 from django.conf import settings
 from django.db.models import Count
 from django.db.transaction import atomic
-from rest_framework.views import APIView
 from django.shortcuts import get_object_or_404
 from rest_framework.response import Response
+from rest_framework.views import APIView

+from assets.models import Asset
+from common.permissions import IsValidUser
 from ops.const import Types
 from ops.models import Job, JobExecution
 from ops.serializers.job import JobSerializer, JobExecutionSerializer

-__all__ = ['JobViewSet', 'JobExecutionViewSet', 'JobRunVariableHelpAPIView',
-           'JobAssetDetail', 'JobExecutionTaskDetail', 'FrequentUsernames']
+__all__ = [
+    'JobViewSet', 'JobExecutionViewSet', 'JobRunVariableHelpAPIView',
+    'JobAssetDetail', 'JobExecutionTaskDetail', 'UsernameHintsAPI'
+]

 from ops.tasks import run_ops_job_execution
 from ops.variables import JMS_JOB_VARIABLE_HELP
 from orgs.mixins.api import OrgBulkModelViewSet
 from orgs.utils import tmp_to_org, get_current_org
 from accounts.models import Account
-from rbac.permissions import RBACPermission
+from perms.models import PermNode
+from perms.utils import UserPermAssetUtil


 def set_task_to_serializer_data(serializer, task):
@@ -26,9 +31,22 @@ def set_task_to_serializer_data(serializer, task):
     setattr(serializer, "_data", data)


+def merge_nodes_and_assets(nodes, assets, user):
+    if nodes:
+        perm_util = UserPermAssetUtil(user=user)
+        for node_id in nodes:
+            if node_id == PermNode.FAVORITE_NODE_KEY:
+                node_assets = perm_util.get_favorite_assets()
+            elif node_id == PermNode.UNGROUPED_NODE_KEY:
+                node_assets = perm_util.get_ungroup_assets()
+            else:
+                node, node_assets = perm_util.get_node_all_assets(node_id)
+            assets.extend(node_assets.exclude(id__in=[asset.id for asset in assets]))
+    return assets
+
+
 class JobViewSet(OrgBulkModelViewSet):
     serializer_class = JobSerializer
-    permission_classes = (RBACPermission,)
     search_fields = ('name', 'comment')
     model = Job

@@ -49,6 +67,10 @@ class JobViewSet(OrgBulkModelViewSet):

     def perform_create(self, serializer):
         run_after_save = serializer.validated_data.pop('run_after_save', False)
+        node_ids = serializer.validated_data.pop('nodes', [])
+        assets = serializer.validated_data.__getitem__('assets')
+        assets = merge_nodes_and_assets(node_ids, assets, self.request.user)
+        serializer.validated_data.__setitem__('assets', assets)
         instance = serializer.save()
         if instance.instant or run_after_save:
             self.run_job(instance, serializer)
@@ -70,9 +92,9 @@ class JobViewSet(OrgBulkModelViewSet):
 class JobExecutionViewSet(OrgBulkModelViewSet):
     serializer_class = JobExecutionSerializer
     http_method_names = ('get', 'post', 'head', 'options',)
-    permission_classes = (RBACPermission,)
     model = JobExecution
     search_fields = ('material',)
+    filterset_fields = ['status', 'job_id']

     @atomic
     def perform_create(self, serializer):
@@ -88,56 +110,66 @@ class JobExecutionViewSet(OrgBulkModelViewSet):
     def get_queryset(self):
         queryset = super().get_queryset()
         queryset = queryset.filter(creator=self.request.user)
-        job_id = self.request.query_params.get('job_id')
-        if job_id:
-            queryset = queryset.filter(job_id=job_id)
         return queryset


+class JobAssetDetail(APIView):
+    rbac_perms = {
+        'get': ['ops.view_jobexecution'],
+    }
+
+    def get(self, request, **kwargs):
+        execution_id = request.query_params.get('execution_id', '')
+        execution = get_object_or_404(JobExecution, id=execution_id)
+        return Response(data=execution.assent_result_detail)
+
+
+class JobExecutionTaskDetail(APIView):
+    rbac_perms = {
+        'GET': ['ops.view_jobexecution'],
+    }
+
+    def get(self, request, **kwargs):
+        org = get_current_org()
+        task_id = str(kwargs.get('task_id'))
+
+        with tmp_to_org(org):
+            execution = get_object_or_404(JobExecution, task_id=task_id)
+
+        return Response(data={
+            'status': execution.status,
+            'is_finished': execution.is_finished,
+            'is_success': execution.is_success,
+            'time_cost': execution.time_cost,
+            'job_id': execution.job.id,
+        })
+
+
 class JobRunVariableHelpAPIView(APIView):
-    rbac_perms = ()
-    permission_classes = ()
+    permission_classes = [IsValidUser]

     def get(self, request, **kwargs):
         return Response(data=JMS_JOB_VARIABLE_HELP)


-class JobAssetDetail(APIView):
-    rbac_perms = ()
-    permission_classes = ()
-
-    def get(self, request, **kwargs):
-        execution_id = request.query_params.get('execution_id')
-        if execution_id:
-            execution = get_object_or_404(JobExecution, id=execution_id)
-            return Response(data=execution.assent_result_detail)
-
-
-class JobExecutionTaskDetail(APIView):
-    rbac_perms = ()
-    permission_classes = ()
-
-    def get(self, request, **kwargs):
-        org = get_current_org()
-        task_id = str(kwargs.get('task_id'))
-        if task_id:
-            with tmp_to_org(org):
-                execution = get_object_or_404(JobExecution, task_id=task_id)
-                return Response(data={
-                    'status': execution.status,
-                    'is_finished': execution.is_finished,
-                    'is_success': execution.is_success,
-                    'time_cost': execution.time_cost,
-                    'job_id': execution.job.id,
-                })
-
-
-class FrequentUsernames(APIView):
-    rbac_perms = ()
-    permission_classes = ()
-
-    def get(self, request, **kwargs):
-        top_accounts = Account.objects.exclude(username='root').exclude(username__startswith='jms_').values(
-            'username').annotate(
-            total=Count('username')).order_by('total')[:5]
+class UsernameHintsAPI(APIView):
+    permission_classes = [IsValidUser]
+
+    def post(self, request, **kwargs):
+        node_ids = request.data.get('nodes', None)
+        asset_ids = request.data.get('assets', [])
+        query = request.data.get('query', None)
+
+        assets = list(Asset.objects.filter(id__in=asset_ids).all())
+
+        assets = merge_nodes_and_assets(node_ids, assets, request.user)
+
+        top_accounts = Account.objects \
+            .exclude(username__startswith='jms_') \
+            .exclude(username__startswith='js_') \
+            .filter(username__icontains=query) \
+            .filter(asset__in=assets) \
+            .values('username') \
+            .annotate(total=Count('username')) \
+            .order_by('total', '-username')[:10]
+
         return Response(data=top_accounts)
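`merge_nodes_and_assets` extends the explicit asset list with every node's assets while skipping ids already present. The same dedup-merge, sketched with plain dictionaries instead of querysets and permission lookups:

```python
def merge(assets, node_assets_by_node, node_ids):
    # assets: list of {'id', 'name'} dicts; node_assets_by_node maps a node id to
    # the assets granted under it (a stand-in for UserPermAssetUtil lookups).
    seen = {a['id'] for a in assets}
    for node_id in node_ids:
        for asset in node_assets_by_node.get(node_id, []):
            if asset['id'] not in seen:
                assets.append(asset)
                seen.add(asset['id'])
    return assets


assets = [{'id': 1, 'name': 'web-1'}]
nodes = {'ungrouped': [{'id': 1, 'name': 'web-1'}, {'id': 2, 'name': 'db-1'}]}
print(merge(assets, nodes, ['ungrouped']))  # web-1 kept once, db-1 appended
```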
@@ -5,13 +5,14 @@ from django.utils.translation import gettext_lazy as _
 from django_celery_beat.models import PeriodicTask
 from rest_framework import serializers

+from ops.celery import app
+from ops.models import CeleryTask, CeleryTaskExecution
+
 __all__ = [
     'CeleryResultSerializer', 'CeleryTaskExecutionSerializer',
     'CeleryPeriodTaskSerializer', 'CeleryTaskSerializer'
 ]

-from ops.models import CeleryTask, CeleryTaskExecution
-

 class CeleryResultSerializer(serializers.Serializer):
     id = serializers.UUIDField()
@@ -37,11 +38,24 @@ class CeleryTaskSerializer(serializers.ModelSerializer):

 class CeleryTaskExecutionSerializer(serializers.ModelSerializer):
     is_success = serializers.BooleanField(required=False, read_only=True, label=_('Success'))
+    task_name = serializers.SerializerMethodField()

     class Meta:
         model = CeleryTaskExecution
         fields = [
-            "id", "name", "args", "kwargs", "time_cost", "timedelta",
+            "id", "name", "task_name", "args", "kwargs", "time_cost", "timedelta",
             "is_success", "is_finished", "date_published",
             "date_start", "date_finished"
         ]
+
+    @staticmethod
+    def get_task_name(obj):
+        from assets.const import AutomationTypes as AssetTypes
+        from accounts.const import AutomationTypes as AccountTypes
+        tp_dict = dict(AssetTypes.choices) | dict(AccountTypes.choices)
+        tp = obj.kwargs.get('tp')
+        task = app.tasks.get(obj.name)
+        task_name = getattr(task, 'verbose_name', obj.name)
+        if tp:
+            task_name = f'{task_name}({tp_dict.get(tp, tp)})'
+        return task_name
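`get_task_name` prefers a task's `verbose_name` and, when the execution carries a `tp` kwarg, appends that type's human label from the merged automation-type choices. The lookup pattern on its own, with invented choice tuples standing in for the real enums:

```python
ASSET_TYPES = [('ping', 'Ping'), ('gather_facts', 'Gather facts')]
ACCOUNT_TYPES = [('change_secret', 'Change secret'), ('push_account', 'Push account')]


def display_task_name(base_name, tp=None):
    # Python 3.9+ dict union; on duplicate keys the right-hand dict wins.
    tp_dict = dict(ASSET_TYPES) | dict(ACCOUNT_TYPES)
    if tp:
        return f'{base_name}({tp_dict.get(tp, tp)})'
    return base_name


print(display_task_name('Execute automation', 'change_secret'))  # Execute automation(Change secret)
```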
@@ -33,22 +33,6 @@ class JobSerializer(BulkOrgResourceModelSerializer, PeriodTaskSerializerMixin):
         user = request.user if request else None
         return user

-    def create(self, validated_data):
-        assets = validated_data.__getitem__('assets')
-        node_ids = validated_data.pop('nodes', None)
-        if node_ids:
-            user = self.get_request_user()
-            perm_util = UserPermAssetUtil(user=user)
-            for node_id in node_ids:
-                if node_id == PermNode.FAVORITE_NODE_KEY:
-                    node_assets = perm_util.get_favorite_assets()
-                elif node_id == PermNode.UNGROUPED_NODE_KEY:
-                    node_assets = perm_util.get_ungroup_assets()
-                else:
-                    node, node_assets = perm_util.get_node_all_assets(node_id)
-                assets.extend(node_assets.exclude(id__in=[asset.id for asset in assets]))
-        return super().create(validated_data)
-
     class Meta:
         model = Job
         read_only_fields = [
@@ -7,9 +7,12 @@
     <script src="{% static 'js/plugins/xterm/addons/fit/fit.js' %}"></script>
     <link rel="stylesheet" href="{% static 'js/plugins/xterm/xterm.css' %}" />
     <link rel="shortcut icon" href="{{ INTERFACE.favicon }}" type="image/x-icon">
+    <script src="{% url 'javascript-catalog' %}"></script>
+    <script src="{% static "js/jumpserver.js" %}?_=9"></script>
     <style>
         body {
             background-color: black;
+            margin: 0;
         }

         .xterm-rows {
@@ -37,8 +40,42 @@
             background-color: #494141;
             border-radius: 6px;
         }
+        #term {
+            padding: 0 8px;
+        }
+        .info {
+            display: flex;
+            flex-wrap: wrap;
+            width: 100%;
+            padding: 6px 8px 6px 24px;
+            margin: 0;
+            background-color: #F3F3F5;
+        }
+        .info .item {
+            flex: auto;
+            list-style-type: square;
+            font-size: 14px;
+            color: #585757;
+        }
+        .info .item .value {
+            color: black;
+        }
     </style>
 </head>
+<ul class="info">
+    <li class="item">
+        <span>ID:</span>
+        <span class="value task-id"></span>
+    </li>
+    <li class="item">
+        <span>{% trans 'Task name' %}:</span>
+        <span class="value task-type"></span>
+    </li>
+    <li class="item">
+        <span>{% trans 'Date start' %}:</span>
+        <span class="value date-start"></span>
+    </li>
+</ul>
 <div id="term" style="height: 100%;width: 100%">
 </div>
@@ -52,6 +89,7 @@
     var failOverWsURL = scheme + "://" + document.location.hostname + ':' + failOverPort + url;
     var term;
     var ws;
+    var extraQuery = Object.fromEntries(new URLSearchParams(window.location.search));

     $(document).ready(function () {
         term = new Terminal({
@@ -85,7 +123,23 @@
                 term.write("Connect websocket server error")
             }
         }
+        getAutomationExecutionInfo();
     }).on('resize', window, function () {
         window.fit.fit(term);
     });
+    function getAutomationExecutionInfo() {
+        let url = "{% url 'api-ops:task-executions-detail' pk=task_id %}";
+
+        requestApi({
+            url: url,
+            method: "GET",
+            flash_message: false,
+            success(data) {
+                const dateStart = new Date(data.date_start).toLocaleString();
+                $('.task-id').html(data.id);
+                $('.task-type').html(data.task_name);
+                $('.date-start').html(dateStart);
+            }
+        })
+    }
 </script>
@@ -27,7 +27,7 @@ urlpatterns = [
     path('variables/help/', api.JobRunVariableHelpAPIView.as_view(), name='variable-help'),
     path('job-execution/asset-detail/', api.JobAssetDetail.as_view(), name='asset-detail'),
     path('job-execution/task-detail/<uuid:task_id>/', api.JobExecutionTaskDetail.as_view(), name='task-detail'),
-    path('frequent-username/', api.FrequentUsernames.as_view(), name='frequent-usernames'),
+    path('username-hints/', api.UsernameHintsAPI.as_view(), name='username-hints'),
    path('ansible/job-execution/<uuid:pk>/log/', api.AnsibleTaskLogApi.as_view(), name='job-execution-log'),

     path('celery/task/<uuid:name>/task-execution/<uuid:pk>/log/', api.CeleryTaskExecutionLogApi.as_view(),
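The renamed endpoint takes a POST body with optional `nodes`, `assets`, and a `query` substring, and answers with the ten most common matching usernames. A hedged client-side sketch; the URL prefix, token, and ids are placeholders and not confirmed by the diff, only the relative path and the `username`/`total` response fields are:

```python
import requests

# Placeholders: adjust the host, API prefix, authentication, and ids for a real deployment.
url = 'https://jumpserver.example.com/api/v1/ops/username-hints/'
headers = {'Authorization': 'Token <api-token>', 'Content-Type': 'application/json'}
payload = {
    'nodes': [],                 # optional permission-node ids
    'assets': ['<asset-uuid>'],  # assets to aggregate account usernames over
    'query': 'adm',              # substring matched against usernames
}

resp = requests.post(url, json=payload, headers=headers, timeout=10)
resp.raise_for_status()
for hint in resp.json():
    print(hint['username'], hint['total'])
```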
@@ -59,8 +59,6 @@ def expire_user_orgs(*args):

 @receiver(post_save, sender=Organization)
 def on_org_create(sender, instance, created=False, **kwargs):
-    if not created:
-        return
     expire_user_orgs()


@@ -80,7 +78,7 @@ def on_org_create_or_update(sender, instance, **kwargs):


 @receiver(pre_delete, sender=Organization)
-def on_org_delete(sender, instance, **kwargs):
+def delete_org_root_node_on_org_delete(sender, instance, **kwargs):
     expire_orgs_mapping_for_memory(instance.id)

     # Delete all nodes under this organization
@@ -91,7 +89,7 @@ def on_org_delete(sender, instance, **kwargs):


 @receiver(post_delete, sender=Organization)
-def on_org_delete(sender, instance, **kwargs):
+def expire_user_orgs_on_org_delete(sender, instance, **kwargs):
     expire_user_orgs()
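Both same-named receivers do stay connected (the decorator captures each function object), but the module-level name ends up pointing at only the last definition, which is why giving each handler a distinct name makes the signal wiring easier to read and reference. A tiny illustration with a stand-in decorator:

```python
registry = []


def receiver(func):
    # Minimal stand-in for django.dispatch.receiver: register and return the function.
    registry.append(func)
    return func


@receiver
def on_org_delete():
    return 'pre_delete handler'


@receiver
def on_org_delete():  # noqa: F811 - rebinds the module-level name
    return 'post_delete handler'


print(len(registry))    # 2 -> both handlers remain registered
print(on_org_delete())  # 'post_delete handler' -> the name now hides the first one
```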
@@ -1,4 +1,6 @@
 import abc
+import re
+from collections import defaultdict
 from urllib.parse import parse_qsl

 from django.conf import settings
@@ -11,6 +13,7 @@ from rest_framework.response import Response

 from accounts.const import AliasAccount
 from assets.api import SerializeToTreeNodeMixin
+from assets.const import AllTypes
 from assets.models import Asset
 from assets.utils import KubernetesTree
 from authentication.models import ConnectionToken
@@ -26,7 +29,8 @@ from ..mixin import SelfOrPKUserMixin
 __all__ = [
     'UserGrantedK8sAsTreeApi',
     'UserPermedNodesWithAssetsAsTreeApi',
-    'UserPermedNodeChildrenWithAssetsAsTreeApi'
+    'UserPermedNodeChildrenWithAssetsAsTreeApi',
+    'UserPermedNodeChildrenWithAssetsAsCategoryTreeApi',
 ]


@@ -137,6 +141,74 @@ class UserPermedNodeChildrenWithAssetsAsTreeApi(BaseUserNodeWithAssetAsTreeApi):
         return self.query_node_key or self.default_unfolded_node_key


+class UserPermedNodeChildrenWithAssetsAsCategoryTreeApi(
+    SelfOrPKUserMixin, SerializeToTreeNodeMixin, ListAPIView
+):
+    @property
+    def is_sync(self):
+        sync = self.request.query_params.get('sync', 0)
+        return int(sync) == 1
+
+    @property
+    def tp(self):
+        return self.request.query_params.get('type')
+
+    def get_assets(self):
+        query_asset_util = UserPermAssetUtil(self.user)
+        node = PermNode.objects.filter(
+            granted_node_rels__user=self.user, parent_key='').first()
+        if node:
+            __, assets = query_asset_util.get_node_all_assets(node.id)
+        else:
+            assets = Asset.objects.none()
+        return assets
+
+    def to_tree_nodes(self, assets):
+        if not assets:
+            return []
+        assets = assets.annotate(tp=F('platform__type'))
+        asset_type_map = defaultdict(list)
+        for asset in assets:
+            asset_type_map[asset.tp].append(asset)
+        tp = self.tp
+        if tp:
+            assets = asset_type_map.get(tp, [])
+            if not assets:
+                return []
+            pid = f'ROOT_{str(assets[0].category).upper()}_{tp}'
+            return self.serialize_assets(assets, pid=pid)
+        resource_platforms = assets.order_by('id').values_list('platform_id', flat=True)
+        node_all = AllTypes.get_tree_nodes(resource_platforms)
+        pattern = re.compile(r'\(0\)?')
+        nodes = []
+        for node in node_all:
+            meta = node.get('meta', {})
+            if pattern.search(node['name']) or meta.get('type') == 'platform':
+                continue
+            _type = meta.get('_type')
+            if _type:
+                node['type'] = _type
+            nodes.append(node)
+
+        if not self.is_sync:
+            return nodes
+
+        asset_nodes = []
+        for node in nodes:
+            node['open'] = True
+            tp = node.get('meta', {}).get('_type')
+            if not tp:
+                continue
+            assets = asset_type_map.get(tp, [])
+            asset_nodes += self.serialize_assets(assets, pid=node['id'])
+        return nodes + asset_nodes
+
+    def list(self, request, *args, **kwargs):
+        assets = self.get_assets()
+        nodes = self.to_tree_nodes(assets)
+        return Response(data=nodes)
+
+
 class UserGrantedK8sAsTreeApi(SelfOrPKUserMixin, ListAPIView):
     """ User's granted K8s tree """
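The category-tree API buckets a user's assets by `platform__type` and then hangs each bucket under its type node. The grouping step on its own, with simple tuples standing in for Asset rows:

```python
from collections import defaultdict

assets = [
    ('web-1', 'linux'),
    ('db-1', 'mysql'),
    ('web-2', 'linux'),
]

asset_type_map = defaultdict(list)
for name, tp in assets:
    asset_type_map[tp].append(name)

# One child list per type node, ready to serialize under pid=f'ROOT_..._{tp}'
for tp, children in asset_type_map.items():
    print(tp, children)
```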
@@ -97,6 +97,7 @@ class AssetPermissionSerializer(BulkOrgResourceModelSerializer):
             if condition in username_secret_type_dict:
                 continue
             account_data = {key: getattr(template, key) for key in account_attribute}
+            account_data['su_from'] = template.get_su_from_account(asset)
             account_data['name'] = f"{account_data['name']}-{_('Account template')}"
             need_create_accounts.append(Account(**{'asset_id': asset.id, **account_data}))
         return Account.objects.bulk_create(need_create_accounts)
@@ -8,7 +8,7 @@ from rest_framework import serializers
 from accounts.models import Account
 from assets.const import Category, AllTypes
 from assets.models import Node, Asset, Platform
-from assets.serializers.asset.common import AssetProtocolsPermsSerializer
+from assets.serializers.asset.common import AssetProtocolsPermsSerializer, AssetLabelSerializer
 from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
 from orgs.mixins.serializers import OrgResourceModelSerializerMixin
 from perms.serializers.permission import ActionChoicesField
@@ -25,13 +25,15 @@ class AssetPermedSerializer(OrgResourceModelSerializerMixin):
     protocols = AssetProtocolsPermsSerializer(many=True, required=False, label=_('Protocols'))
     category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
     type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))
+    labels = AssetLabelSerializer(many=True, required=False, label=_('Label'))
     domain = ObjectRelatedField(required=False, queryset=Node.objects, label=_('Domain'))

     class Meta:
         model = Asset
         only_fields = [
-            "id", "name", "address", 'domain', 'platform',
-            "comment", "org_id", "is_active",
+            'id', 'name', 'address', 'domain', 'platform',
+            'comment', 'org_id', 'is_active', 'date_verified',
+            'created_by', 'date_created', 'connectivity', 'nodes', 'labels'
         ]
         fields = only_fields + ['protocols', 'category', 'type'] + ['org_name']
         read_only_fields = fields
@@ -37,6 +37,9 @@ user_permission_urlpatterns = [
     path('<str:user>/nodes/children-with-assets/tree/',
          api.UserPermedNodeChildrenWithAssetsAsTreeApi.as_view(),
          name='user-node-children-with-assets-as-tree'),
+    path('<str:user>/nodes/children-with-assets/category/tree/',
+         api.UserPermedNodeChildrenWithAssetsAsCategoryTreeApi.as_view(),
+         name='user-node-children-with-assets-as-category-tree'),
     # Sync tree
     path('<str:user>/nodes/all-with-assets/tree/',
          api.UserPermedNodesWithAssetsAsTreeApi.as_view(),
@@ -1,15 +1,11 @@
-from assets.models import FavoriteAsset, Asset
-
-
 from django.conf import settings
 from django.db.models import Q

+from assets.models import FavoriteAsset, Asset
 from common.utils.common import timeit
-
 from perms.models import AssetPermission, PermNode, UserAssetGrantedTreeNodeRelation
-
 from .permission import AssetPermissionUtil


 __all__ = ['AssetPermissionPermAssetUtil', 'UserPermAssetUtil', 'UserPermNodeUtil']
@@ -1,9 +1,9 @@
-from django.utils.translation import gettext_lazy as _
-from django.db import models
-from django.db.models import Q
 from django.conf import settings
 from django.core.exceptions import ValidationError
+from django.db import models
+from django.db.models import Q
 from django.db.models.signals import post_save
+from django.utils.translation import gettext_lazy as _
 from rest_framework.serializers import ValidationError

 from common.db.models import JMSBaseModel, CASCADE_SIGNAL_SKIP
@@ -109,6 +109,13 @@ class RoleBinding(JMSBaseModel):
     def is_scope_org(self):
         return self.scope == Scope.org

+    @staticmethod
+    def orgs_order_by_name(orgs):
+        from orgs.models import Organization
+        default_system_org_ids = [Organization.DEFAULT_ID, Organization.SYSTEM_ID]
+        default_system_orgs = orgs.filter(id__in=default_system_org_ids)
+        return default_system_orgs | orgs.exclude(id__in=default_system_org_ids).order_by('name')
+
     @classmethod
     def get_user_has_the_perm_orgs(cls, perm, user):
         from orgs.models import Organization
@@ -134,6 +141,7 @@ class RoleBinding(JMSBaseModel):
         org_ids = [b.org.id for b in bindings if b.org]
         orgs = all_orgs.filter(id__in=org_ids)

+        orgs = cls.orgs_order_by_name(orgs)
         workbench_perm = 'rbac.view_workbench'
         # Global organization
         if orgs and perm != workbench_perm and user.has_perm('orgs.view_rootorg'):
@@ -183,7 +191,7 @@ class OrgRoleBinding(RoleBinding):

 class SystemRoleBindingManager(RoleBindingManager):
     def get_queryset(self):
-        queryset = super(RoleBindingManager, self).get_queryset()\
+        queryset = super(RoleBindingManager, self).get_queryset() \
             .filter(scope=Scope.system)
         return queryset
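`orgs_order_by_name` keeps the built-in Default and System organizations at the front and sorts the rest alphabetically. The same ordering rule over plain data, with lists approximating the queryset union:

```python
DEFAULT_ID, SYSTEM_ID = 'default', 'system'


def orgs_order_by_name(orgs):
    # orgs: list of (id, name) tuples; built-ins first, everything else sorted by name.
    builtin = [o for o in orgs if o[0] in (DEFAULT_ID, SYSTEM_ID)]
    others = sorted((o for o in orgs if o[0] not in (DEFAULT_ID, SYSTEM_ID)), key=lambda o: o[1])
    return builtin + others


orgs = [('2', 'beta'), ('system', 'SYSTEM'), ('1', 'alpha'), ('default', 'DEFAULT')]
print(orgs_order_by_name(orgs))
```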
@@ -48,3 +48,4 @@ class PrivateSettingSerializer(PublicSettingSerializer):
     ANNOUNCEMENT = serializers.DictField()

     TICKETS_ENABLED = serializers.BooleanField()
+    CONNECTION_TOKEN_REUSABLE = serializers.BooleanField()
@@ -94,15 +94,15 @@ class SecurityAuthSerializer(serializers.Serializer):
     )
     USER_LOGIN_SINGLE_MACHINE_ENABLED = serializers.BooleanField(
         required=False, default=False, label=_("Only single device login"),
-        help_text=_("Next device login, pre login will be logout")
+        help_text=_("After the user logs in on the new device, other logged-in devices will automatically log out")
     )
     ONLY_ALLOW_EXIST_USER_AUTH = serializers.BooleanField(
         required=False, default=False, label=_("Only exist user login"),
-        help_text=_("If enable, CAS、OIDC auth will be failed, if user not exist yet")
+        help_text=_("If enabled, non-existent users will not be allowed to log in; if disabled, users of other authentication methods except local authentication methods are allowed to log in and automatically create users (if the user does not exist)")
     )
     ONLY_ALLOW_AUTH_FROM_SOURCE = serializers.BooleanField(
         required=False, default=False, label=_("Only from source login"),
-        help_text=_("Only log in from the user source property")
+        help_text=_("If it is enabled, the user will only authenticate to the source when logging in; if it is disabled, the user will authenticate all the enabled authentication methods in a certain order when logging in, and as long as one of the authentication methods is successful, they can log in directly")
     )
     SECURITY_MFA_VERIFY_TTL = serializers.IntegerField(
         min_value=5, max_value=60 * 60 * 10,
@@ -19,6 +19,9 @@ class AppletHostViewSet(JMSBulkModelViewSet):
     serializer_class = AppletHostSerializer
     queryset = AppletHost.objects.all()
     search_fields = ['asset_ptr__name', 'asset_ptr__address', ]
+    rbac_perms = {
+        'generate_accounts': 'terminal.change_applethost',
+    }

     def dispatch(self, request, *args, **kwargs):
         with tmp_to_builtin_org(system=1):
@@ -37,6 +40,12 @@ class AppletHostViewSet(JMSBulkModelViewSet):
         instance.check_terminal_binding(request)
         return Response({'msg': 'ok'})

+    @action(methods=['put'], detail=True, url_path='generate-accounts')
+    def generate_accounts(self, request, *args, **kwargs):
+        instance = self.get_object()
+        instance.generate_accounts()
+        return Response({'msg': 'ok'})
+

 class AppletHostDeploymentViewSet(viewsets.ModelViewSet):
     serializer_class = AppletHostDeploymentSerializer
@@ -186,6 +186,8 @@ class SessionReplayViewSet(AsyncApiMixin, viewsets.ViewSet):
             tp = 'guacamole'
         if url.endswith('.cast.gz'):
             tp = 'asciicast'
+        if url.endswith('.replay.mp4'):
+            tp = 'mp4'

         download_url = reverse('api-terminal:session-replay-download', kwargs={'pk': session.id})
         data = {
@@ -1,8 +1,8 @@
-from rest_framework.exceptions import MethodNotAllowed, ValidationError
-from rest_framework.decorators import action
-from rest_framework.response import Response
 from django.conf import settings
 from django.utils.translation import ugettext_lazy as _
+from rest_framework.decorators import action
+from rest_framework.exceptions import MethodNotAllowed, ValidationError
+from rest_framework.response import Response

 from common.const.http import PATCH
 from orgs.mixins.api import OrgModelViewSet
@@ -25,6 +25,16 @@
   register: rds_install

 - name: Stop Tinker before install (jumpserver)
+  ansible.windows.win_powershell:
+    script: |
+      if (Get-Process -Name 'tinker' -ErrorAction SilentlyContinue) {
+        TASKKILL /F /IM tinker.exe /T
+      }
+      else {
+        $Ansible.Changed = $false
+      }
+
+- name: Stop Tinkerd before install (jumpserver)
   ansible.windows.win_powershell:
     script: |
       if (Get-Service -Name 'JumpServer Tinker' -ErrorAction SilentlyContinue) {
@@ -48,6 +48,7 @@ class TerminalType(TextChoices):
     magnus = 'magnus', 'Magnus'
     razor = 'razor', 'Razor'
     tinker = 'tinker', 'Tinker'
+    video_worker = 'video_worker', 'Video Worker'

     @classmethod
     def types(cls):
@@ -4,7 +4,6 @@ from django.db import migrations, models

-
 class Migration(migrations.Migration):

     dependencies = [
         ('terminal', '0049_endpoint_redis_port'),
     ]
@@ -13,10 +12,10 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='terminal',
             name='type',
-            field=models.CharField(choices=[
-                ('koko', 'KoKo'), ('guacamole', 'Guacamole'), ('omnidb', 'OmniDB'),
-                ('xrdp', 'Xrdp'), ('lion', 'Lion'), ('core', 'Core'), ('celery', 'Celery'),
-                ('magnus', 'Magnus'), ('razor', 'Razor'), ('tinker', 'Tinker'),
-            ], default='koko', max_length=64, verbose_name='type'),
+            field=models.CharField(
+                choices=[('koko', 'KoKo'), ('guacamole', 'Guacamole'), ('omnidb', 'OmniDB'), ('xrdp', 'Xrdp'),
+                         ('lion', 'Lion'), ('core', 'Core'), ('celery', 'Celery'), ('magnus', 'Magnus'),
+                         ('razor', 'Razor'), ('tinker', 'Tinker'), ('video_worker', 'Video Worker')], default='koko',
+                max_length=64, verbose_name='type'),
         ),
     ]
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.17 on 2023-05-09 11:02
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('terminal', '0060_sessionsharing_action_permission'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='applet',
+            name='can_concurrent',
+            field=models.BooleanField(default=True, verbose_name='Can concurrent'),
+        ),
+    ]
@@ -32,6 +32,7 @@ class Applet(JMSBaseModel):
     is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
     builtin = models.BooleanField(default=False, verbose_name=_('Builtin'))
     protocols = models.JSONField(default=list, verbose_name=_('Protocol'))
+    can_concurrent = models.BooleanField(default=True, verbose_name=_('Can concurrent'))
     tags = models.JSONField(default=list, verbose_name=_('Tags'))
     comment = models.TextField(default='', blank=True, verbose_name=_('Comment'))
     hosts = models.ManyToManyField(
@@ -134,37 +135,70 @@ class Applet(JMSBaseModel):
         shutil.copytree(path, pkg_path)
         return instance, serializer

-    def select_host_account(self):
-        # Select an active applet host
+    def select_host(self, user):
         hosts = [
             host for host in self.hosts.filter(is_active=True)
             if host.load != 'offline'
         ]

         if not hosts:
             return None

-        key_tmpl = 'applet_host_accounts_{}_{}'
-        host = random.choice(hosts)
-        using_keys = cache.keys(key_tmpl.format(host.id, '*')) or []
-        accounts_username_used = list(cache.get_many(using_keys).values())
-        logger.debug('Applet host account using: {}: {}'.format(host.name, accounts_username_used))
-        accounts = host.accounts.all() \
-            .filter(is_active=True, privileged=False) \
-            .exclude(username__in=accounts_username_used)
+        prefer_key = 'applet_host_prefer_{}'.format(user.id)
+        prefer_host_id = cache.get(prefer_key, None)
+        pref_host = [host for host in hosts if host.id == prefer_host_id]
+        if pref_host:
+            host = pref_host[0]
+        else:
+            host = random.choice(hosts)
+            cache.set(prefer_key, host.id, timeout=None)
+        return host

-        msg = 'Applet host remain accounts: {}: {}'.format(host.name, len(accounts))
+    @staticmethod
+    def random_select_prefer_account(user, host, accounts):
+        msg = 'Applet host remain public accounts: {}: {}'.format(host.name, len(accounts))
         if len(accounts) == 0:
             logger.error(msg)
-        else:
-            logger.debug(msg)
-
-        if not accounts:
             return None
+        prefer_host_account_key = 'applet_host_prefer_account_{}_{}'.format(user.id, host.id)
+        prefer_account_id = cache.get(prefer_host_account_key, None)
+        prefer_account = None
+        if prefer_account_id:
+            prefer_account = accounts.filter(id=prefer_account_id).first()
+        if prefer_account:
+            account = prefer_account
+        else:
+            account = random.choice(accounts)
+            cache.set(prefer_host_account_key, account.id, timeout=None)
+        return account

-        account = random.choice(accounts)
+    def select_host_account(self, user):
+        # Select an active applet host
+        host = self.select_host(user)
+        if not host:
+            return None
+        can_concurrent = self.can_concurrent and self.type == 'general'
+
+        accounts = host.accounts.all().filter(is_active=True, privileged=False)
+        private_account = accounts.filter(username='js_{}'.format(user.username)).first()
+        accounts_using_key_tmpl = 'applet_host_accounts_{}_{}'
+
+        if private_account and can_concurrent:
+            account = private_account
+        else:
+            using_keys = cache.keys(accounts_using_key_tmpl.format(host.id, '*')) or []
+            accounts_username_used = list(cache.get_many(using_keys).values())
+            logger.debug('Applet host account using: {}: {}'.format(host.name, accounts_username_used))
+
+            # Prefer the private account
+            if private_account and private_account.username not in accounts_username_used:
+                account = private_account
+            else:
+                accounts = accounts.exclude(username__in=accounts_username_used).filter(username__startswith='jms_')
+                account = self.random_select_prefer_account(user, host, accounts)
+        if not account:
+            return
         ttl = 60 * 60 * 24
-        lock_key = key_tmpl.format(host.id, account.username)
+        lock_key = accounts_using_key_tmpl.format(host.id, account.username)
         cache.set(lock_key, account.username, ttl)

         return {
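Host and account selection are now sticky per user: the first random pick is cached and reused on later launches, falling back to a fresh pick when the remembered host is no longer available. A cache-backed sketch of the same idea, with a plain dict in place of Django's cache backend:

```python
import random

cache = {}  # stand-in for django.core.cache.cache


def select_host(user_id, hosts):
    # hosts: list of (host_id, name) tuples that are currently online.
    key = f'applet_host_prefer_{user_id}'
    preferred = [h for h in hosts if h[0] == cache.get(key)]
    if preferred:
        return preferred[0]
    host = random.choice(hosts)
    cache[key] = host[0]
    return host


hosts = [('h1', 'win-applet-1'), ('h2', 'win-applet-2')]
first = select_host('alice', hosts)
assert select_host('alice', hosts) == first  # same host on the next launch
```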
@@ -84,9 +84,13 @@ class AppletHost(Host):
         return random_string(16, special_char=True)
 
     def generate_accounts(self):
-        amount = int(os.getenv('TERMINAL_ACCOUNTS_AMOUNT', 100))
-        now_count = self.accounts.filter(privileged=False).count()
-        need = amount - now_count
+        self.generate_public_accounts()
+        self.generate_private_accounts()
+
+    def generate_public_accounts(self):
+        public_amount = int(os.getenv('TERMINAL_ACCOUNTS_AMOUNT', 100))
+        now_count = self.accounts.filter(privileged=False, username__startswith='jms').count()
+        need = public_amount - now_count
 
         accounts = []
         account_model = self.accounts.model
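The "top up to TERMINAL_ACCOUNTS_AMOUNT" bookkeeping above only counts accounts whose usernames start with jms (the shared public pool), so private js_* accounts no longer eat into the quota. A hedged sketch of just that arithmetic follows; the 'jms_%04d' naming is illustrative only, the real usernames are produced elsewhere in the model.

import os

def public_usernames_to_create(existing_jms_usernames, amount=None):
    # default quota comes from the same environment variable as the diff
    if amount is None:
        amount = int(os.getenv('TERMINAL_ACCOUNTS_AMOUNT', 100))
    need = max(amount - len(existing_jms_usernames), 0)
    start = len(existing_jms_usernames)
    return ['jms_{:04d}'.format(i) for i in range(start, start + need)]

print(public_usernames_to_create(['jms_0000', 'jms_0001'], amount=5))
# -> ['jms_0002', 'jms_0003', 'jms_0004']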
@@ -99,7 +103,31 @@ class AppletHost(Host):
             org_id=self.LOCKING_ORG, is_active=False,
         )
         accounts.append(account)
-        bulk_create_with_history(accounts, account_model, batch_size=20)
+        bulk_create_with_history(accounts, account_model, batch_size=20, ignore_conflicts=True)
+
+    def generate_private_accounts_by_usernames(self, usernames):
+        accounts = []
+        account_model = self.accounts.model
+        for username in usernames:
+            password = self.random_password()
+            username = 'js_' + username
+            account = account_model(
+                username=username, secret=password, name=username,
+                asset_id=self.id, secret_type='password', version=1,
+                org_id=self.LOCKING_ORG, is_active=False,
+            )
+            accounts.append(account)
+        bulk_create_with_history(accounts, account_model, batch_size=20, ignore_conflicts=True)
+
+    def generate_private_accounts(self):
+        from users.models import User
+        usernames = User.objects \
+            .filter(is_active=True, is_service_account=False) \
+            .values_list('username', flat=True)
+        account_usernames = self.accounts.all().values_list('username', flat=True)
+        account_usernames = [username[3:] for username in account_usernames if username.startswith('js_')]
+        not_exist_users = set(usernames) - set(account_usernames)
+        self.generate_private_accounts_by_usernames(not_exist_users)
 
 
 class AppletHostDeployment(JMSBaseModel):
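The new generate_private_accounts works by set arithmetic over usernames: private per-user accounts are stored as 'js_<username>', so the users still missing an account are found by stripping that prefix and diffing against the user list. A simplified, self-contained illustration (plain lists instead of querysets):

def missing_private_usernames(user_usernames, host_account_usernames):
    # keep only 'js_*' accounts and drop the prefix to recover the user names
    existing = {name[3:] for name in host_account_usernames if name.startswith('js_')}
    return set(user_usernames) - existing

print(missing_private_usernames(
    ['alice', 'bob', 'carol'],
    ['js_alice', 'jms_0001', 'Administrator'],
))
# -> {'bob', 'carol'} (set order is unspecified)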
@@ -48,8 +48,8 @@ class Session(OrgModelMixin):
 
     upload_to = 'replay'
     ACTIVE_CACHE_KEY_PREFIX = 'SESSION_ACTIVE_{}'
-    SUFFIX_MAP = {1: '.gz', 2: '.replay.gz', 3: '.cast.gz'}
-    DEFAULT_SUFFIXES = ['.replay.gz', '.cast.gz', '.gz']
+    SUFFIX_MAP = {1: '.gz', 2: '.replay.gz', 3: '.cast.gz', 4: '.replay.mp4'}
+    DEFAULT_SUFFIXES = ['.replay.gz', '.cast.gz', '.gz', '.replay.mp4']
 
     # Todo: 将来干掉 local_path, 使用 default storage 实现 (drop local_path later; use default storage)
     def get_all_possible_local_path(self):
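The new map entry means a version-4 replay resolves to a '.replay.mp4' file name when the storage path is built. A small illustrative helper (not the project's API) showing the lookup:

SUFFIX_MAP = {1: '.gz', 2: '.replay.gz', 3: '.cast.gz', 4: '.replay.mp4'}

def replay_filename(session_id, version=2):
    # fall back to the common '.replay.gz' suffix for unknown versions
    return '{}{}'.format(session_id, SUFFIX_MAP.get(version, '.replay.gz'))

print(replay_filename('5c9d4f', version=4))  # -> '5c9d4f.replay.mp4'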
@@ -133,6 +133,13 @@ class SessionJoinRecord(JMSBaseModel, OrgModelMixin):
         # self
         if self.verify_code != self.sharing.verify_code:
             return False, _('Invalid verification code')
+
+        # Link can only be joined once by the same user.
+        queryset = SessionJoinRecord.objects.filter(
+            verify_code=self.verify_code, sharing=self.sharing,
+            joiner=self.joiner, date_joined__lt=self.date_joined)
+        if queryset.exists():
+            return False, _('You have already joined this session')
         return True, ''
 
     def join_failed(self, reason):
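The added check enforces "one join per share link per user" by looking for an earlier join record with the same code, sharing and joiner. A plain-data sketch of that rule (dicts replace the SessionJoinRecord model; field names mirror the query above):

def can_join(records, verify_code, sharing_id, joiner_id, date_joined):
    earlier = [
        r for r in records
        if r['verify_code'] == verify_code and r['sharing_id'] == sharing_id
        and r['joiner_id'] == joiner_id and r['date_joined'] < date_joined
    ]
    if earlier:
        return False, 'You have already joined this session'
    return True, ''

history = [dict(verify_code='1234', sharing_id='s1', joiner_id='u1', date_joined=1)]
print(can_join(history, '1234', 's1', 'u1', date_joined=2))  # -> (False, ...)
print(can_join(history, '1234', 's1', 'u2', date_joined=2))  # -> (True, '')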
@@ -61,7 +61,7 @@ class SessionDisplaySerializer(SessionSerializer):
 
 class ReplaySerializer(serializers.Serializer):
     file = serializers.FileField(allow_empty_file=True)
-    version = serializers.IntegerField(write_only=True, required=False, min_value=2, max_value=3)
+    version = serializers.IntegerField(write_only=True, required=False, min_value=2, max_value=4)
 
 
 class SessionJoinValidateSerializer(serializers.Serializer):
@@ -2,11 +2,14 @@ from django.db.models.signals import post_save, post_delete
 from django.dispatch import receiver
 from django.utils.functional import LazyObject
 
+from accounts.models import Account
 from common.signals import django_ready
 from common.utils import get_logger
 from common.utils.connection import RedisPubSub
 from orgs.utils import tmp_to_builtin_org
+from users.models import User
 from ..models import Applet, AppletHost
+from ..tasks import applet_host_generate_accounts
 from ..utils import DBPortManager
 
 db_port_manager: DBPortManager
@@ -19,12 +22,30 @@ def on_applet_host_create(sender, instance, created=False, **kwargs):
         return
     applets = Applet.objects.all()
     instance.applets.set(applets)
-    with tmp_to_builtin_org(system=1):
-        instance.generate_accounts()
 
+    applet_host_generate_accounts.delay(instance.id)
     applet_host_change_pub_sub.publish(True)
 
 
+@receiver(post_save, sender=User)
+def on_user_create_create_account(sender, instance, created=False, **kwargs):
+    if not created:
+        return
+
+    with tmp_to_builtin_org(system=1):
+        applet_hosts = AppletHost.objects.all()
+        for host in applet_hosts:
+            host.generate_private_accounts_by_usernames([instance.username])
+
+
+@receiver(post_delete, sender=User)
+def on_user_delete_remove_account(sender, instance, **kwargs):
+    with tmp_to_builtin_org(system=1):
+        applet_hosts = AppletHost.objects.all().values_list('id', flat=True)
+        accounts = Account.objects.filter(asset_id__in=applet_hosts, username=instance.username)
+        accounts.delete()
+
+
 @receiver(post_delete, sender=AppletHost)
 def on_applet_host_delete(sender, instance, **kwargs):
     applet_host_change_pub_sub.publish(True)
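The hunk above ties applet-host account maintenance to the user lifecycle: creating a user provisions a js_<username> account on every applet host, deleting a user removes the matching accounts. A standalone sketch of that wiring, assuming only that Django is installed; plain Signal objects stand in for post_save/post_delete on the real User model, and the print statements replace the real ORM work.

from django.dispatch import Signal, receiver

user_created = Signal()   # stands in for post_save(sender=User, created=True)
user_deleted = Signal()   # stands in for post_delete(sender=User)

@receiver(user_created)
def on_user_create_create_account(sender, username, **kwargs):
    # the real handler loops over AppletHost and calls
    # generate_private_accounts_by_usernames([instance.username])
    print("would create account 'js_{}' on every applet host".format(username))

@receiver(user_deleted)
def on_user_delete_remove_account(sender, username, **kwargs):
    # the real handler deletes the matching Account rows on all applet hosts
    print("would remove accounts for '{}' from applet hosts".format(username))

user_created.send(sender=None, username='alice')
user_deleted.send(sender=None, username='alice')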
@@ -16,7 +16,7 @@ from ops.celery.decorator import (
 from orgs.utils import tmp_to_builtin_org
 from .backends import server_replay_storage
 from .models import (
-    Status, Session, Task, AppletHostDeployment
+    Status, Session, Task, AppletHostDeployment, AppletHost
 )
 from .utils import find_session_replay_local
 
@@ -82,7 +82,7 @@ def upload_session_replay_to_external_storage(session_id):
 
 @shared_task(
     verbose_name=_('Run applet host deployment'),
-    activity_callback=lambda self, did, *args, **kwargs: ([did], )
+    activity_callback=lambda self, did, *args, **kwargs: ([did],)
 )
 def run_applet_host_deployment(did):
     with tmp_to_builtin_org(system=1):
@@ -98,3 +98,16 @@ def run_applet_host_deployment_install_applet(did, applet_id):
     with tmp_to_builtin_org(system=1):
         deployment = AppletHostDeployment.objects.get(id=did)
         deployment.install_applet(applet_id)
+
+
+@shared_task(
+    verbose_name=_('Generate applet host accounts'),
+    activity_callback=lambda self, host_id, *args, **kwargs: ([host_id],)
+)
+def applet_host_generate_accounts(host_id):
+    applet_host = AppletHost.objects.filter(id=host_id).first()
+    if not applet_host:
+        return
+
+    with tmp_to_builtin_org(system=1):
+        applet_host.generate_accounts()
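This new task is what on_applet_host_create now dispatches with .delay(instance.id) instead of generating accounts inline, so the potentially slow account creation moves to a Celery worker. A minimal sketch of that hand-off, assuming celery is installed; task_always_eager is a demo-only setting that executes the task inline so no broker is needed, and the host id is dummy data.

from celery import Celery

app = Celery('sketch')
app.conf.task_always_eager = True   # demo only: run tasks inline, no broker required

@app.task
def applet_host_generate_accounts(host_id):
    # the real task loads the AppletHost and calls generate_accounts()
    # inside tmp_to_builtin_org(system=1)
    print('generating accounts for applet host', host_id)

applet_host_generate_accounts.delay('8f6c0e')   # queued in production, inline here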
@@ -140,7 +140,7 @@ class ComponentsPrometheusMetricsUtil(TypedComponentsStatusMetricsUtil):
         for component in self.components:
             if not component.is_alive:
                 continue
-            component_stat = component.latest_stat
+            component_stat = component.last_stat
             if not component_stat:
                 continue
             metric_text = state_metric_text % (
@@ -40,6 +40,8 @@ class ApplyAssetSerializer(BaseApplyAssetSerializer, TicketApplySerializer):
        ticket_extra_kwargs = TicketApplySerializer.Meta.extra_kwargs
        extra_kwargs = {
            'apply_accounts': {'required': False},
+           'apply_date_start': {'allow_null': False},
+           'apply_date_expired': {'allow_null': False},
        }
        extra_kwargs.update(ticket_extra_kwargs)
 
@@ -12,13 +12,18 @@ from common.drf.filters import AttrRulesFilterBackend
 from common.utils import get_logger
 from orgs.utils import current_org, tmp_to_root_org
 from rbac.models import Role, RoleBinding
+from rbac.permissions import RBACPermission
 from users.utils import LoginBlockUtil, MFABlockUtils
 from .mixins import UserQuerysetMixin
 from .. import serializers
 from ..filters import UserFilter
 from ..models import User
 from ..notifications import ResetMFAMsg
-from ..serializers import UserSerializer, MiniUserSerializer, InviteSerializer
+from ..permissions import UserObjectPermission
+from ..serializers import (
+    UserSerializer,
+    MiniUserSerializer, InviteSerializer
+)
 from ..signals import post_user_create
 
 logger = get_logger(__name__)
@@ -32,6 +37,7 @@ class UserViewSet(CommonApiMixin, UserQuerysetMixin, SuggestionMixin, BulkModelV
     filterset_class = UserFilter
     extra_filter_backends = [AttrRulesFilterBackend]
     search_fields = ('username', 'email', 'name')
+    permission_classes = [RBACPermission, UserObjectPermission]
     serializer_classes = {
         'default': UserSerializer,
         'suggestion': MiniUserSerializer,
@@ -13,6 +13,6 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='user',
             name='source',
-            field=models.CharField(choices=[('local', 'Local'), ('ldap', 'LDAP/AD'), ('openid', 'OpenID'), ('radius', 'Radius'), ('cas', 'CAS'), ('saml2', 'SAML2'), ('oauth2', 'OAuth2'), ('custom', 'Custom')], default='local', max_length=30, verbose_name='Source'),
+            field=models.CharField(choices=[('local', 'Local'), ('ldap', 'LDAP/AD'), ('openid', 'OpenID'), ('radius', 'Radius'), ('cas', 'CAS'), ('saml2', 'SAML2'), ('oauth2', 'OAuth2'), ('wecom', 'WeCom'), ('dingtalk', 'DingTalk'), ('feishu', 'FeiShu'), ('custom', 'Custom')], default='local', max_length=30, verbose_name='Source'),
         ),
     ]
@@ -703,14 +703,15 @@ class User(AuthMixin, TokenMixin, RoleMixin, MFAMixin, JSONFilterMixin, Abstract
         cas = 'cas', 'CAS'
         saml2 = 'saml2', 'SAML2'
         oauth2 = 'oauth2', 'OAuth2'
+        wecom = 'wecom', _('WeCom')
+        dingtalk = 'dingtalk', _('DingTalk')
+        feishu = 'feishu', _('FeiShu')
         custom = 'custom', 'Custom'
 
     SOURCE_BACKEND_MAPPING = {
         Source.local: [
             settings.AUTH_BACKEND_MODEL,
             settings.AUTH_BACKEND_PUBKEY,
-            settings.AUTH_BACKEND_WECOM,
-            settings.AUTH_BACKEND_DINGTALK,
         ],
         Source.ldap: [
             settings.AUTH_BACKEND_LDAP
@@ -731,6 +732,15 @@ class User(AuthMixin, TokenMixin, RoleMixin, MFAMixin, JSONFilterMixin, Abstract
         Source.oauth2: [
             settings.AUTH_BACKEND_OAUTH2
         ],
+        Source.wecom: [
+            settings.AUTH_BACKEND_WECOM
+        ],
+        Source.feishu: [
+            settings.AUTH_BACKEND_FEISHU
+        ],
+        Source.dingtalk: [
+            settings.AUTH_BACKEND_DINGTALK
+        ],
         Source.custom: [
             settings.AUTH_BACKEND_CUSTOM
         ]
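With WeCom, DingTalk and FeiShu promoted to their own Source values, each source now maps to exactly the backends allowed to authenticate users of that source (they are no longer lumped under local). A hedged illustration of how such a mapping can be consulted; the backend paths below are placeholders, not JumpServer's settings values.

SOURCE_BACKEND_MAPPING = {
    'local': ['backends.ModelBackend', 'backends.PublicKeyBackend'],
    'wecom': ['backends.WeComBackend'],
    'dingtalk': ['backends.DingTalkBackend'],
    'feishu': ['backends.FeiShuBackend'],
}

def backend_allowed(source, backend_path):
    # a login is acceptable only if the backend that produced it is allowed for the user's source
    return backend_path in SOURCE_BACKEND_MAPPING.get(source, [])

print(backend_allowed('wecom', 'backends.WeComBackend'))   # True
print(backend_allowed('wecom', 'backends.ModelBackend'))   # False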
Some files were not shown because too many files have changed in this diff.