Merge pull request #10284 from jumpserver/dev

v3.2.0
pull/10289/head
Jiangjie.Bai 2023-04-20 18:23:12 +08:00 committed by GitHub
commit b44e6c258f
92 changed files with 2829 additions and 1101 deletions

View File

@ -76,8 +76,8 @@ RUN --mount=type=cache,target=/root/.cache/pip \
&& pip install --upgrade setuptools wheel \
&& pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-38.0.4-cp39-cp39-linux_loongarch64.whl \
&& pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp39-cp39-linux_loongarch64.whl \
&& pip install $(grep 'PyNaCl' requirements/requirements.txt) \
&& GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true pip install grpcio \
&& pip install https://download.jumpserver.org/pypi/simple/PyNaCl/PyNaCl-1.5.0-cp39-cp39-linux_loongarch64.whl \
&& pip install https://download.jumpserver.org/pypi/simple/grpcio/grpcio-1.54.0-cp39-cp39-linux_loongarch64.whl \
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
&& pip install -r requirements/requirements.txt

View File

@ -8,6 +8,7 @@ from accounts import serializers
from accounts.filters import AccountFilterSet
from accounts.models import Account
from assets.models import Asset, Node
from common.api import ExtraFilterFieldsMixin
from common.permissions import UserConfirmation, ConfirmType, IsValidUser
from common.views.mixins import RecordViewLogMixin
from orgs.mixins.api import OrgBulkModelViewSet
@ -111,7 +112,7 @@ class AssetAccountBulkCreateApi(CreateAPIView):
return Response(data=serializer.data, status=HTTP_200_OK)
class AccountHistoriesSecretAPI(RecordViewLogMixin, ListAPIView):
class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, RecordViewLogMixin, ListAPIView):
model = Account.history.model
serializer_class = serializers.AccountHistorySerializer
http_method_names = ['get', 'options']
@ -123,6 +124,10 @@ class AccountHistoriesSecretAPI(RecordViewLogMixin, ListAPIView):
def get_object(self):
return get_object_or_404(Account, pk=self.kwargs.get('pk'))
@staticmethod
def filter_spm_queryset(resource_ids, queryset):
return queryset.filter(history_id__in=resource_ids)
def get_queryset(self):
account = self.get_object()
histories = account.history.all()

View File

@ -1,5 +1,5 @@
id: change_secret_by_ssh
name: Change secret by SSH
name: "{{ 'SSH account change secret' | trans }}"
category:
- device
- host
@ -10,5 +10,10 @@ params:
- name: commands
type: list
label: '自定义命令'
default: ['']
help_text: '自定义命令中如需包含账号的 username 和 password 字段,请使用 {username}、{password}格式,执行任务时会进行替换 。<br />比如针对 Linux 主机进行改密,一般需要配置三条命令:<br />1.passwd {username} <br />2.{password} <br />3.{password}'
default: [ '' ]
help_text: '自定义命令中如需包含账号的 账号、密码、SSH 连接的用户密码 字段,<br />请使用 {username}、{password}、{login_password}格式,执行任务时会进行替换 。<br />比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />1. enable<br />2. {login_password}<br />3. configure terminal<br />4. username {username} privilege 0 password {password} <br />5. end'
i18n:
SSH account change secret:
zh: SSH 账号改密
ja: SSH アカウントのパスワード変更
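
Note: the help_text above describes the placeholder substitution that runs when the task executes; in this same PR the substitution itself lives in the custom Ansible module's get_commands(), which calls str.format with username, password and login_password. A minimal standalone sketch of that behaviour, using the Cisco command sequence from the help text and purely hypothetical account values:

# Standalone sketch of the placeholder substitution described in help_text above.
# The real logic is get_commands() in the custom Ansible module further down in
# this diff; the account values below are hypothetical.
commands = [
    'enable',
    '{login_password}',
    'configure terminal',
    'username {username} privilege 0 password {password}',
    'end',
]

def render_commands(commands, username, password, login_password):
    return [
        c.format(username=username, password=password, login_password=login_password)
        for c in commands
    ]

print(render_commands(commands, username='admin',
                      password='new-secret', login_password='old-secret'))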

View File

@ -1,6 +1,11 @@
id: change_secret_mongodb
name: Change secret for MongoDB
name: "{{ 'MongoDB account change secret' | trans }}"
category: database
type:
- mongodb
method: change_secret
i18n:
MongoDB account change secret:
zh: MongoDB 账号改密
ja: MongoDB アカウントのパスワード変更

View File

@ -1,7 +1,12 @@
id: change_secret_mysql
name: Change secret for MySQL
name: "{{ 'MySQL account change secret' | trans }}"
category: database
type:
- mysql
- mariadb
method: change_secret
i18n:
MySQL account change secret:
zh: MySQL 账号改密
ja: MySQL アカウントのパスワード変更

View File

@ -1,6 +1,11 @@
id: change_secret_oracle
name: Change secret for Oracle
name: "{{ 'Oracle account change secret' | trans }}"
category: database
type:
- oracle
method: change_secret
i18n:
Oracle account change secret:
zh: Oracle 账号改密
ja: Oracle アカウントのパスワード変更

View File

@ -1,6 +1,11 @@
id: change_secret_postgresql
name: Change secret for PostgreSQL
name: "{{ 'PostgreSQL account change secret' | trans }}"
category: database
type:
- postgresql
method: change_secret
i18n:
PostgreSQL account change secret:
zh: PostgreSQL 账号改密
ja: PostgreSQL アカウントのパスワード変更

View File

@ -1,6 +1,11 @@
id: change_secret_sqlserver
name: Change secret for SQLServer
name: "{{ 'SQLServer account change secret' | trans }}"
category: database
type:
- sqlserver
method: change_secret
i18n:
SQLServer account change secret:
zh: SQLServer 账号改密
ja: SQLServer アカウントのパスワード変更

View File

@ -1,6 +1,11 @@
id: change_secret_aix
name: Change secret for aix
name: "{{ 'AIX account change secret' | trans }}"
category: host
type:
- AIX
method: change_secret
i18n:
AIX account change secret:
zh: AIX 账号改密
ja: AIX アカウントのパスワード変更

View File

@ -1,7 +1,12 @@
id: change_secret_posix
name: Change secret for posix
name: "{{ 'Posix account change secret' | trans }}"
category: host
type:
- unix
- linux
method: change_secret
i18n:
Posix account change secret:
zh: Posix 账号改密
ja: Posix アカウントのパスワード変更

View File

@ -1,7 +1,12 @@
id: change_secret_local_windows
name: Change secret local account for Windows
name: "{{ 'Windows account change secret' | trans }}"
version: 1
method: change_secret
category: host
type:
- windows
i18n:
Windows account change secret:
zh: Windows 账号改密
ja: Windows アカウントのパスワード変更

View File

@ -1,6 +1,11 @@
id: gather_accounts_mongodb
name: Gather account from MongoDB
name: "{{ 'MongoDB account gather' | trans }}"
category: database
type:
- mongodb
method: gather_accounts
i18n:
MongoDB account gather:
zh: MongoDB 账号收集
ja: MongoDB アカウントの収集

View File

@ -1,7 +1,12 @@
id: gather_accounts_mysql
name: Gather account from MySQL
name: "{{ 'MySQL account gather' | trans }}"
category: database
type:
- mysql
- mariadb
method: gather_accounts
i18n:
MySQL account gather:
zh: MySQL 账号收集
ja: MySQL アカウントの収集

View File

@ -1,6 +1,11 @@
id: gather_accounts_oracle
name: Gather account from Oracle
name: "{{ 'Oracle account gather' | trans }}"
category: database
type:
- oracle
method: gather_accounts
i18n:
Oracle account gather:
zh: Oracle 账号收集
ja: Oracle アカウントの収集

View File

@ -1,6 +1,11 @@
id: gather_accounts_postgresql
name: Gather account for PostgreSQL
name: "{{ 'PostgreSQL account gather' | trans }}"
category: database
type:
- postgresql
method: gather_accounts
i18n:
PostgreSQL account gather:
zh: PostgreSQL 账号收集
ja: PostgreSQL アカウントの収集

View File

@ -1,7 +1,12 @@
id: gather_accounts_posix
name: Gather posix account
name: "{{ 'Posix account gather' | trans }}"
category: host
type:
- linux
- unix
method: gather_accounts
i18n:
Posix account gather:
zh: Posix 账号收集
ja: Posix アカウントの収集

View File

@ -1,7 +1,12 @@
id: gather_accounts_windows
name: Gather account windows
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category: host
type:
- windows
i18n:
Windows account gather:
zh: Windows 账号收集
ja: Windows アカウントの収集

View File

@ -1,6 +1,11 @@
id: push_account_mongodb
name: Push account for MongoDB
name: "{{ 'MongoDB account push' | trans }}"
category: database
type:
- mongodb
method: push_account
i18n:
MongoDB account push:
zh: MongoDB 账号推送
ja: MongoDB アカウントのプッシュ

View File

@ -1,7 +1,12 @@
id: push_account_mysql
name: Push account for MySQL
name: "{{ 'MySQL account push' | trans }}"
category: database
type:
- mysql
- mariadb
method: push_account
i18n:
MySQL account push:
zh: MySQL 账号推送
ja: MySQL アカウントのプッシュ

View File

@ -1,6 +1,11 @@
id: push_account_oracle
name: Push account for Oracle
name: "{{ 'Oracle account push' | trans }}"
category: database
type:
- oracle
method: push_account
i18n:
Oracle account push:
zh: Oracle 账号推送
ja: Oracle アカウントのプッシュ

View File

@ -1,6 +1,11 @@
id: push_account_postgresql
name: Push account for PostgreSQL
name: "{{ 'PostgreSQL account push' | trans }}"
category: database
type:
- postgresql
method: push_account
i18n:
PostgreSQL account push:
zh: PostgreSQL 账号推送
ja: PostgreSQL アカウントのプッシュ

View File

@ -1,6 +1,11 @@
id: push_account_sqlserver
name: Push account for SQLServer
name: "{{ 'SQLServer account push' | trans }}"
category: database
type:
- sqlserver
method: push_account
i18n:
SQLServer account push:
zh: SQLServer 账号推送
ja: SQLServer アカウントのプッシュ

View File

@ -1,5 +1,5 @@
id: push_account_aix
name: Push account for aix
name: "{{ 'Aix account push' | trans }}"
category: host
type:
- AIX
@ -22,3 +22,8 @@ params:
default: ''
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
i18n:
Aix account push:
zh: Aix 账号推送
ja: Aix アカウントのプッシュ

View File

@ -1,5 +1,5 @@
id: push_account_posix
name: Push account for posix
name: "{{ 'Posix account push' | trans }}"
category: host
type:
- unix
@ -23,3 +23,8 @@ params:
label: '用户组'
default: ''
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
i18n:
Posix account push:
zh: Posix 账号推送
ja: Posix アカウントのプッシュ

View File

@ -1,5 +1,5 @@
id: push_account_local_windows
name: Push account local account for Windows
name: "{{ 'Windows account push' | trans }}"
version: 1
method: push_account
category: host
@ -11,3 +11,8 @@ params:
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
i18n:
Windows account push:
zh: Windows 账号推送
ja: Windows アカウントのプッシュ

View File

@ -1,8 +1,13 @@
id: verify_account_by_ssh
name: Verify account by SSH
name: "{{ 'SSH account verify' | trans }}"
category:
- device
- host
type:
- all
method: verify_account
i18n:
SSH account verify:
zh: SSH 账号验证
ja: SSH アカウントの検証

View File

@ -1,6 +1,11 @@
id: verify_account_mongodb
name: Verify account from MongoDB
name: "{{ 'MongoDB account verify' | trans }}"
category: database
type:
- mongodb
method: verify_account
i18n:
MongoDB account verify:
zh: MongoDB 账号验证
ja: MongoDB アカウントの検証

View File

@ -1,7 +1,12 @@
id: verify_account_mysql
name: Verify account from MySQL
name: "{{ 'MySQL account verify' | trans }}"
category: database
type:
- mysql
- mariadb
method: verify_account
i18n:
MySQL account verify:
zh: MySQL 账号验证
ja: MySQL アカウントの検証

View File

@ -1,6 +1,11 @@
id: verify_account_oracle
name: Verify account from Oracle
name: "{{ 'Oracle account verify' | trans }}"
category: database
type:
- oracle
method: verify_account
i18n:
Oracle account verify:
zh: Oracle 账号验证
ja: Oracle アカウントの検証

View File

@ -1,6 +1,11 @@
id: verify_account_postgresql
name: Verify account for PostgreSQL
name: "{{ 'PostgreSQL account verify' | trans }}"
category: database
type:
- postgresql
method: verify_account
i18n:
PostgreSQL account verify:
zh: PostgreSQL 账号验证
ja: PostgreSQL アカウントの検証

View File

@ -1,6 +1,11 @@
id: verify_account_sqlserver
name: Verify account from SQLServer
name: "{{ 'SQLServer account verify' | trans }}"
category: database
type:
- sqlserver
method: verify_account
i18n:
SQLServer account verify:
zh: SQLServer 账号验证
ja: SQLServer アカウントの検証

View File

@ -1,7 +1,12 @@
id: verify_account_posix
name: Verify posix account
name: "{{ 'Posix account verify' | trans }}"
category: host
type:
- linux
- unix
method: verify_account
i18n:
Posix account verify:
zh: Posix 账号验证
ja: Posix アカウントの検証

View File

@ -1,6 +1,9 @@
- hosts: windows
gather_facts: no
tasks:
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify account
ansible.windows.win_ping:
vars:

View File

@ -1,7 +1,12 @@
id: verify_account_windows
name: Verify account windows
name: "{{ 'Windows account verify' | trans }}"
version: 1
method: verify_account
category: host
type:
- windows
i18n:
Windows account verify:
zh: Windows 账号验证
ja: Windows アカウントの検証

View File

@ -4,7 +4,6 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0009_account_usernames_to_ids'),
]
@ -15,4 +14,9 @@ class Migration(migrations.Migration):
name='is_sync_account',
field=models.BooleanField(blank=True, default=False, verbose_name='Is sync account'),
),
migrations.AddField(
model_name='account',
name='source_id',
field=models.CharField(max_length=128, null=True, blank=True, verbose_name='Source ID'),
),
]

View File

@ -1,18 +0,0 @@
# Generated by Django 3.2.17 on 2023-03-23 07:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0010_gatheraccountsautomation_is_sync_account'),
]
operations = [
migrations.AddField(
model_name='account',
name='source_id',
field=models.CharField(max_length=128, null=True, blank=True, verbose_name='Source ID'),
),
]

View File

@ -14,7 +14,7 @@ from assets.models import Asset
from common.serializers import SecretReadableMixin
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from common.utils import get_logger
from .base import BaseAccountSerializer
from .base import BaseAccountSerializer, AuthValidateMixin
logger = get_logger(__name__)
@ -34,6 +34,7 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
choices=AccountInvalidPolicy.choices, default=AccountInvalidPolicy.ERROR,
write_only=True, label=_('Exist policy')
)
_template = None
class Meta:
fields = ['template', 'push_now', 'params', 'on_invalid']
@ -52,14 +53,18 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
for data in initial_data:
if not data.get('asset') and not self.instance:
raise serializers.ValidationError({'asset': 'Asset is required'})
raise serializers.ValidationError({'asset': UniqueTogetherValidator.missing_message})
asset = data.get('asset') or self.instance.asset
self.from_template_if_need(data)
self.set_uniq_name_if_need(data, asset)
def to_internal_value(self, data):
self.from_template_if_need(data)
return super().to_internal_value(data)
def set_uniq_name_if_need(self, initial_data, asset):
name = initial_data.get('name')
if name is None:
if name is not None:
return
if not name:
name = initial_data.get('username')
@ -69,11 +74,14 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
name = name + '_' + uuid.uuid4().hex[:4]
initial_data['name'] = name
@staticmethod
def from_template_if_need(initial_data):
template_id = initial_data.get('template')
def from_template_if_need(self, initial_data):
if isinstance(initial_data, str):
return
template_id = initial_data.pop('template', None)
if not template_id:
return
if isinstance(template_id, (str, uuid.UUID)):
template = AccountTemplate.objects.filter(id=template_id).first()
else:
@ -81,6 +89,7 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
if not template:
raise serializers.ValidationError({'template': 'Template not found'})
self._template = template
# Set initial data from template
ignore_fields = ['id', 'date_created', 'date_updated', 'org_id']
field_names = [
@ -105,8 +114,9 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
_validators = super().get_validators()
if getattr(self, 'initial_data', None) is None:
return _validators
on_invalid = self.initial_data.get('on_invalid')
if on_invalid == AccountInvalidPolicy.ERROR:
if on_invalid == AccountInvalidPolicy.ERROR and not self.parent:
return _validators
_validators = [v for v in _validators if not isinstance(v, UniqueTogetherValidator)]
return _validators
@ -137,20 +147,17 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
else:
raise serializers.ValidationError('Account already exists')
def validate(self, attrs):
attrs = super().validate(attrs)
if self.instance:
return attrs
template = attrs.pop('template', None)
if template:
attrs['source'] = Source.TEMPLATE
attrs['source_id'] = str(template.id)
return attrs
def generate_source_data(self, validated_data):
template = self._template
if template is None:
return
validated_data['source'] = Source.TEMPLATE
validated_data['source_id'] = str(template.id)
def create(self, validated_data):
push_now = validated_data.pop('push_now', None)
params = validated_data.pop('params', None)
self.generate_source_data(validated_data)
instance, stat = self.do_create(validated_data)
self.push_account_if_need(instance, push_now, params, stat)
return instance
@ -228,7 +235,9 @@ class AssetAccountBulkSerializerResultSerializer(serializers.Serializer):
changed = serializers.BooleanField(read_only=True, label=_('Changed'))
class AssetAccountBulkSerializer(AccountCreateUpdateSerializerMixin, serializers.ModelSerializer):
class AssetAccountBulkSerializer(
AccountCreateUpdateSerializerMixin, AuthValidateMixin, serializers.ModelSerializer
):
assets = serializers.PrimaryKeyRelatedField(queryset=Asset.objects, many=True, label=_('Assets'))
class Meta:
@ -264,7 +273,7 @@ class AssetAccountBulkSerializer(AccountCreateUpdateSerializerMixin, serializers
@staticmethod
def _handle_update_create(vd, lookup):
ori = Account.objects.filter(**lookup).first()
if ori and ori.secret == vd['secret']:
if ori and ori.secret == vd.get('secret'):
return ori, False, 'skipped'
instance, value = Account.objects.update_or_create(defaults=vd, **lookup)
@ -366,6 +375,7 @@ class AssetAccountBulkSerializer(AccountCreateUpdateSerializerMixin, serializers
def create(self, validated_data):
push_now = validated_data.pop('push_now', False)
self.generate_source_data(validated_data)
results = self.perform_bulk_create(validated_data)
self.push_accounts_if_need(results, push_now)
for res in results:
@ -382,6 +392,7 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
class AccountHistorySerializer(serializers.ModelSerializer):
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
class Meta:
model = Account.history.model

View File

@ -1,3 +1,6 @@
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from accounts.models import AccountTemplate, Account
from assets.models import Asset
from common.serializers import SecretReadableMixin
@ -9,37 +12,67 @@ class AccountTemplateSerializer(BaseAccountSerializer):
model = AccountTemplate
@staticmethod
def bulk_update_accounts(instance, diff):
def account_save(data, account):
for field, value in data.items():
setattr(account, field, value)
try:
account.save(update_fields=list(data.keys()))
except IntegrityError:
pass
# TODO: too many database queries here; optimize later
@atomic()
def bulk_update_accounts(self, instance, diff):
accounts = Account.objects.filter(source_id=instance.id)
if not accounts:
return
secret_type = diff.pop('secret_type', None)
diff.pop('secret', None)
name = diff.pop('name', None)
username = diff.pop('username', None)
secret_type = diff.pop('secret_type', None)
update_accounts = []
for account in accounts:
for field, value in diff.items():
setattr(account, field, value)
update_accounts.append(account)
if update_accounts:
Account.objects.bulk_update(update_accounts, diff.keys())
if secret_type is None:
return
if name:
for account in accounts:
data = {'name': name}
self.account_save(data, account)
update_accounts = []
if secret_type and username:
asset_ids_supports = self.get_asset_ids_supports(accounts, secret_type)
for account in accounts:
asset_id = account.asset_id
if asset_id not in asset_ids_supports:
data = {'username': username}
self.account_save(data, account)
continue
data = {'username': username, 'secret_type': secret_type, 'secret': instance.secret}
self.account_save(data, account)
elif secret_type:
asset_ids_supports = self.get_asset_ids_supports(accounts, secret_type)
for account in accounts:
asset_id = account.asset_id
if asset_id not in asset_ids_supports:
continue
data = {'secret_type': secret_type, 'secret': instance.secret}
self.account_save(data, account)
elif username:
for account in accounts:
data = {'username': username}
self.account_save(data, account)
@staticmethod
def get_asset_ids_supports(accounts, secret_type):
asset_ids = accounts.values_list('asset_id', flat=True)
secret_type_supports = Asset.get_secret_type_assets(asset_ids, secret_type)
asset_ids_supports = [asset.id for asset in secret_type_supports]
for account in accounts:
asset_id = account.asset_id
if asset_id not in asset_ids_supports:
continue
account.secret_type = secret_type
account.secret = instance.secret
update_accounts.append(account)
if update_accounts:
Account.objects.bulk_update(update_accounts, ['secret', 'secret_type'])
return [asset.id for asset in secret_type_supports]
def update(self, instance, validated_data):
diff = {

View File

@ -1,7 +1,7 @@
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import gettext_lazy as _
from common.db.models import JMSBaseModel
from common.utils import contains_ip

View File

@ -7,7 +7,7 @@ from orgs.models import Organization
from users.models import User
common_help_text = _(
"Format for comma-delimited string, with * indicating a match all. "
"With * indicating a match all. "
)
@ -22,7 +22,7 @@ class ACLUsersSerializer(serializers.Serializer):
class ACLAssestsSerializer(serializers.Serializer):
address_group_help_text = _(
"Format for comma-delimited string, with * indicating a match all. "
"With * indicating a match all. "
"Such as: "
"192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64"
" (Domain name support)"

View File

@ -1,19 +1,20 @@
from django.utils.translation import ugettext as _
from rest_framework import serializers
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from common.serializers import BulkModelSerializer, MethodSerializer
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from jumpserver.utils import has_valid_xpack_license
from users.models import User
from .rules import RuleSerializer
from ..models import LoginACL
from ..models.base import ActionChoices
__all__ = [
"LoginACLSerializer",
]
common_help_text = _(
"Format for comma-delimited string, with * indicating a match all. "
"With * indicating a match all. "
)
@ -22,7 +23,7 @@ class LoginACLSerializer(BulkModelSerializer):
reviewers = ObjectRelatedField(
queryset=User.objects, label=_("Reviewers"), many=True, required=False
)
action = LabeledChoiceField(choices=LoginACL.ActionChoices.choices, label=_('Action'))
action = LabeledChoiceField(choices=ActionChoices.choices, label=_('Action'))
reviewers_amount = serializers.IntegerField(
read_only=True, source="reviewers.count", label=_("Reviewers amount")
)
@ -55,7 +56,7 @@ class LoginACLSerializer(BulkModelSerializer):
choices = action.choices
if not has_valid_xpack_license():
choices.pop(LoginACL.ActionChoices.review, None)
action.choices = choices
action._choices = choices
def get_rules_serializer(self):
return RuleSerializer()

View File

@ -22,7 +22,7 @@ def ip_group_child_validator(ip_group_child):
ip_group_help_text = _(
'Format for comma-delimited string, with * indicating a match all. '
'With * indicating a match all. '
'Such as: '
'192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64 '
)

View File

@ -96,7 +96,7 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
model = Asset
filterset_class = AssetFilterSet
search_fields = ("name", "address")
ordering = ("name", "connectivity")
ordering_fields = ('name', 'connectivity', 'platform', 'date_updated')
serializer_classes = (
("default", serializers.AssetSerializer),
("platform", serializers.PlatformSerializer),
@ -111,7 +111,6 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
("gathered_info", "assets.view_asset"),
)
extra_filter_backends = [LabelFilterBackend, IpInFilterBackend, NodeFilterBackend]
skip_assets = []
def get_serializer_class(self):
cls = super().get_serializer_class()
@ -144,6 +143,7 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
def filter_bulk_update_data(self):
bulk_data = []
skip_assets = []
for data in self.request.data:
pk = data.get('id')
platform = data.get('platform')
@ -155,16 +155,16 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
if platform.type == asset.type:
bulk_data.append(data)
continue
self.skip_assets.append(asset)
return bulk_data
skip_assets.append(asset)
return bulk_data, skip_assets
def bulk_update(self, request, *args, **kwargs):
bulk_data = self.filter_bulk_update_data()
bulk_data, skip_assets = self.filter_bulk_update_data()
request._full_data = bulk_data
response = super().bulk_update(request, *args, **kwargs)
if response.status_code == HTTP_200_OK and self.skip_assets:
if response.status_code == HTTP_200_OK and skip_assets:
user = request.user
BulkUpdatePlatformSkipAssetUserMsg(user, self.skip_assets).publish()
BulkUpdatePlatformSkipAssetUserMsg(user, skip_assets).publish()
return response
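
Note: the change above turns skip_assets from a class attribute into a local variable returned by filter_bulk_update_data. DRF creates a fresh view instance per request, but a class-level list is shared by every instance, so appending to it leaks skipped assets from one bulk update into the next. A minimal illustration of the difference, independent of Django:

# Why a mutable class attribute leaks state between instances,
# which is what returning a local list avoids.
class SharedState:
    skip_assets = []              # one list shared by every instance

    def handle(self, asset):
        self.skip_assets.append(asset)
        return self.skip_assets

class LocalState:
    def handle(self, asset):
        skip_assets = []          # fresh list per call
        skip_assets.append(asset)
        return skip_assets

print(SharedState().handle('a'))  # ['a']
print(SharedState().handle('b'))  # ['a', 'b']  <- the previous request leaked in
print(LocalState().handle('a'))   # ['a']
print(LocalState().handle('b'))   # ['b']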

View File

@ -71,7 +71,7 @@ class PlatformAutomationMethodsApi(generics.ListAPIView):
def generate_serializer_fields(self):
data = self.automation_methods()
fields = {
i['id']: i['params_serializer']()
i['id']: i['params_serializer'](label=i['name'])
if i['params_serializer'] else None
for i in data
}

View File

@ -1,6 +1,11 @@
id: gather_facts_mongodb
name: Gather facts from MongoDB
name: "{{ 'Gather facts from MongoDB' | trans }}"
category: database
type:
- mongodb
method: gather_facts
i18n:
Gather facts from MongoDB:
zh: 从 MongoDB 获取信息
en: Gather facts from MongoDB
ja: MongoDBから事実を取得する

View File

@ -1,7 +1,12 @@
id: gather_facts_mysql
name: Gather facts from MySQL
name: "{{ 'Gather facts from MySQL' | trans }}"
category: database
type:
- mysql
- mariadb
method: gather_facts
i18n:
Gather facts from MySQL:
zh: 从 MySQL 获取信息
en: Gather facts from MySQL
ja: MySQLから事実を取得する

View File

@ -1,6 +1,11 @@
id: gather_facts_oracle
name: Gather facts from Oracle
name: "{{ 'Gather facts from Oracle' | trans }}"
category: database
type:
- oracle
method: gather_facts
i18n:
Gather facts from Oracle:
zh: 从 Oracle 获取信息
en: Gather facts from Oracle
ja: Oracleから事実を取得する

View File

@ -1,6 +1,11 @@
id: gather_facts_postgresql
name: Gather facts for PostgreSQL
name: "{{ 'Gather facts for PostgreSQL' | trans }}"
category: database
type:
- postgresql
method: gather_facts
i18n:
Gather facts for PostgreSQL:
zh: 从 PostgreSQL 获取信息
en: Gather facts for PostgreSQL
ja: PostgreSQLから事実を取得する

View File

@ -1,7 +1,12 @@
id: gather_facts_posix
name: Gather posix facts
name: "{{ 'Gather posix facts' | trans }}"
category: host
type:
- linux
- unix
method: gather_facts
i18n:
Gather posix facts:
zh: 从 Posix 主机获取信息
en: Gather posix facts
ja: Posixから事実を取得する

View File

@ -1,7 +1,12 @@
id: gather_facts_windows
name: Gather facts windows
name: "{{ 'Gather facts windows' | trans }}"
version: 1
method: gather_facts
category: host
type:
- windows
i18n:
Gather facts windows:
zh: 从 Windows 获取信息
en: Gather facts windows
ja: Windowsから事実を取得する

View File

@ -2,7 +2,7 @@ import json
import os
from functools import partial
import yaml
from common.utils.yml import yaml_load_with_i18n
def check_platform_method(manifest, manifest_path):
@ -40,7 +40,8 @@ def get_platform_automation_methods(path):
continue
with open(path, 'r') as f:
manifest = yaml.safe_load(f)
print("path: ", path)
manifest = yaml_load_with_i18n(f)
check_platform_method(manifest, path)
manifest['dir'] = os.path.dirname(path)
manifest['params_serializer'] = generate_serializer(manifest)

View File

@ -1,6 +1,11 @@
id: mongodb_ping
name: Ping MongoDB
name: "{{ 'Ping MongoDB' | trans }}"
category: database
type:
- mongodb
method: ping
i18n:
Ping MongoDB:
zh: 测试 MongoDB 可连接性
en: Ping MongoDB
ja: MongoDBにPingする

View File

@ -1,7 +1,12 @@
id: mysql_ping
name: Ping MySQL
name: "{{ 'Ping MySQL' | trans }}"
category: database
type:
- mysql
- mariadb
method: ping
i18n:
Ping MySQL:
zh: 测试 MySQL 可连接性
en: Ping MySQL
ja: MySQLにPingする

View File

@ -1,6 +1,11 @@
id: oracle_ping
name: Ping Oracle
name: "{{ 'Ping Oracle' | trans }}"
category: database
type:
- oracle
method: ping
i18n:
Ping Oracle:
zh: 测试 Oracle 可连接性
en: Ping Oracle
ja: OracleにPingする

View File

@ -1,6 +1,11 @@
id: ping_postgresql
name: Ping PostgreSQL
name: "{{ 'Ping PostgreSQL' | trans }}"
category: database
type:
- postgresql
method: ping
i18n:
Ping PostgreSQL:
zh: 测试 PostgreSQL 可连接性
en: Ping PostgreSQL
ja: PostgreSQLにPingする

View File

@ -1,6 +1,11 @@
id: sqlserver_ping
name: Ping SQLServer
name: "{{ 'Ping SQLServer' | trans }}"
category: database
type:
- sqlserver
method: ping
i18n:
Ping SQLServer:
zh: 测试 SQLServer 可连接性
en: Ping SQLServer
ja: SQLServerにPingする

View File

@ -1,7 +1,13 @@
id: posix_ping
name: Posix ping
name: "{{ 'Posix ping' | trans }}"
category: host
type:
- linux
- unix
method: ping
i18n:
Posix ping:
zh: 测试 Posix 可连接性
en: Posix ping
ja: Posix ピング

View File

@ -1,5 +1,8 @@
- hosts: windows
gather_facts: no
tasks:
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Windows ping
ansible.builtin.win_ping:

View File

@ -1,7 +1,12 @@
id: win_ping
name: Windows ping
name: "{{ 'Windows ping' | trans }}"
version: 1
method: ping
category: host
type:
- windows
i18n:
Windows ping:
zh: 测试 Windows 可连接性
en: Windows ping
ja: Windows ピング

View File

@ -1,3 +1,4 @@
import json
from collections import defaultdict
from copy import deepcopy
@ -270,7 +271,7 @@ class AllTypes(ChoicesMixin):
return data
@classmethod
def create_or_update_by_platform_data(cls, name, platform_data, platform_cls=None):
def create_or_update_by_platform_data(cls, platform_data, platform_cls=None):
# Don't use Platform directly, because this may be used inside migrations
from assets.models import Platform
if platform_cls is None:
@ -279,6 +280,7 @@ class AllTypes(ChoicesMixin):
automation_data = platform_data.pop('automation', {})
protocols_data = platform_data.pop('protocols', [])
name = platform_data['name']
platform, created = platform_cls.objects.update_or_create(
defaults=platform_data, name=name
)
@ -294,7 +296,6 @@ class AllTypes(ChoicesMixin):
platform.protocols.all().delete()
for p in protocols_data:
p.pop('primary', None)
platform.protocols.create(**p)
@classmethod
@ -302,20 +303,20 @@ class AllTypes(ChoicesMixin):
if platform_cls is None:
platform_cls = cls
print("\n\tCreate internal platforms")
# print("\n\tCreate internal platforms")
for category, type_cls in cls.category_types():
print("\t## Category: {}".format(category.label))
# print("\t## Category: {}".format(category.label))
data = type_cls.internal_platforms()
for tp, platform_datas in data.items():
print("\t >> Type: {}".format(tp.label))
# print("\t >> Type: {}".format(tp.label))
default_platform_data = cls.get_type_default_platform(category, tp)
default_automation = default_platform_data.pop('automation', {})
default_protocols = default_platform_data.pop('protocols', [])
for d in platform_datas:
name = d['name']
print("\t - Platform: {}".format(name))
# print("\t - Platform: {}".format(name))
_automation = d.pop('automation', {})
_protocols = d.pop('_protocols', [])
_protocols_setting = d.pop('protocols_setting', {})
@ -335,7 +336,8 @@ class AllTypes(ChoicesMixin):
'automation': {**default_automation, **_automation},
'protocols': protocols_data
}
cls.create_or_update_by_platform_data(name, platform_data, platform_cls=platform_cls)
print(json.dumps(platform_data, indent=4))
# cls.create_or_update_by_platform_data(platform_data, platform_cls=platform_cls)
@classmethod
def update_user_create_platforms(cls, platform_cls):
@ -350,5 +352,6 @@ class AllTypes(ChoicesMixin):
for platform in user_platforms:
print("\t- Update platform: {}".format(platform.name))
platform_data = cls.get_type_default_platform(platform.category, platform.type)
cls.create_or_update_by_platform_data(platform.name, platform_data, platform_cls=platform_cls)
platform_data['name'] = platform.name
cls.create_or_update_by_platform_data(platform_data, platform_cls=platform_cls)
user_platforms.update(internal=False)

File diff suppressed because it is too large

View File

@ -2,8 +2,6 @@
from django.db import migrations, models
from assets.const import AllTypes
def migrate_platform_charset(apps, schema_editor):
platform_model = apps.get_model('assets', 'Platform')
@ -15,6 +13,9 @@ def migrate_platform_protocol_primary(apps, schema_editor):
platforms = platform_model.objects.all()
for platform in platforms:
p = platform.protocols.filter(primary=True).first()
if p:
continue
p = platform.protocols.first()
if not p:
continue
@ -22,9 +23,57 @@ def migrate_platform_protocol_primary(apps, schema_editor):
p.save()
def migrate_internal_platforms(apps, schema_editor):
def migrate_winrm_for_win(apps, *args):
platform_cls = apps.get_model('assets', 'Platform')
AllTypes.create_or_update_internal_platforms(platform_cls)
windows_name = ['Windows', 'Windows-TLS', 'Windows-RDP']
windows = platform_cls.objects.filter(name__in=windows_name)
for platform in windows:
if platform.protocols.filter(name='winrm').exists():
continue
data = {
'name': 'winrm',
'port': 5985,
'primary': False,
'public': False,
'required': False,
'default': False,
'setting': {"use_ssl": False}
}
platform.protocols.create(**data)
def migrate_device_platform_automation(apps, *args):
platform_cls = apps.get_model('assets', 'Platform')
names = ['General', 'Cisco', 'H3C', 'Huawei']
platforms = platform_cls.objects.filter(name__in=names, category='device')
for platform in platforms:
automation = getattr(platform, 'automation', None)
if not automation:
continue
automation.ansible_config = {
"ansible_connection": "local",
"first_connect_delay": 0.5,
}
automation.ansible_enabled = True
automation.change_secret_enabled = True
automation.change_secret_method = "change_secret_by_ssh"
automation.ping_enabled = True
automation.ping_method = "ping_by_ssh"
automation.verify_account_enabled = True
automation.verify_account_method = "verify_account_by_ssh"
automation.save()
def migrate_web_login_button_error(apps, *args):
protocol_cls = apps.get_model('assets', 'PlatformProtocol')
protocols = protocol_cls.objects.filter(name='http')
for protocol in protocols:
submit_selector = protocol.setting.get('submit_selector', '')
submit_selector = submit_selector.replace('id=longin_button', 'id=login_button')
protocol.setting['submit_selector'] = submit_selector
protocol.save()
class Migration(migrations.Migration):
@ -45,5 +94,7 @@ class Migration(migrations.Migration):
),
migrations.RunPython(migrate_platform_charset),
migrations.RunPython(migrate_platform_protocol_primary),
migrations.RunPython(migrate_internal_platforms),
migrations.RunPython(migrate_winrm_for_win),
migrations.RunPython(migrate_device_platform_automation),
migrations.RunPython(migrate_web_login_button_error),
]

View File

@ -0,0 +1,49 @@
# Generated by Django 3.2.17 on 2023-04-17 06:32
from django.db import migrations, models
def migrate_platform_automation_id(apps, *args):
platform_model = apps.get_model('assets', 'Platform')
for platform in platform_model.objects.all():
if platform.automation:
platform._automation_id = platform.automation.id
platform.save(update_fields=['_automation_id'])
def migrate_automation_platform(apps, *args):
platform_model = apps.get_model('assets', 'Platform')
automation_model = apps.get_model('assets', 'PlatformAutomation')
platforms = platform_model.objects.all()
for platform in platforms:
if not platform._automation_id:
continue
automation = automation_model.objects.filter(id=platform._automation_id).first()
if not automation:
continue
automation.platform = platform
automation.save(update_fields=['platform'])
class Migration(migrations.Migration):
dependencies = [
('assets', '0114_baseautomation_params'),
]
operations = [
migrations.AddField(
model_name='platform',
name='_automation_id',
field=models.UUIDField(editable=False, null=True),
),
migrations.RunPython(migrate_platform_automation_id),
migrations.RemoveField(model_name='platform', name='automation'),
migrations.AddField(
model_name='platformautomation',
name='platform',
field=models.OneToOneField(null=True, on_delete=models.deletion.CASCADE,
related_name='automation', to='assets.platform'),
),
migrations.RunPython(migrate_automation_platform),
migrations.RemoveField(model_name='platform', name='_automation_id'),
]

View File

@ -0,0 +1,29 @@
# Generated by Django 3.2.17 on 2023-04-18 09:26
from django.db import migrations
def update_remote_app_platform(apps, schema_editor):
platform_cls = apps.get_model('assets', 'Platform')
remote_app_host = platform_cls.objects.filter(name='RemoteAppHost').first()
if not remote_app_host:
return
protocols = remote_app_host.protocols.all()
for protocol in protocols:
if protocol.name == 'rdp':
protocol.primary = True
protocol.save()
elif protocol.name == 'ssh':
protocol.required = True
protocol.save()
class Migration(migrations.Migration):
dependencies = [
('assets', '0115_auto_20230417_1425'),
]
operations = [
migrations.RunPython(update_remote_app_platform)
]

View File

@ -19,7 +19,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
assets = models.ManyToManyField('assets.Asset', blank=True, verbose_name=_("Assets"))
type = models.CharField(max_length=16, verbose_name=_('Type'))
is_active = models.BooleanField(default=True, verbose_name=_("Is active"))
params = models.JSONField(default=dict, verbose_name=_("Params"))
params = models.JSONField(default=dict, verbose_name=_("Parameters"))
def __str__(self):
return self.name + '@' + str(self.created_by)

View File

@ -72,6 +72,7 @@ class PlatformAutomation(models.Model):
max_length=32, blank=True, null=True, verbose_name=_("Gather facts method")
)
gather_accounts_params = models.JSONField(default=dict, verbose_name=_("Gather facts params"))
platform = models.OneToOneField('Platform', on_delete=models.CASCADE, related_name='automation', null=True)
class Platform(JMSBaseModel):
@ -99,10 +100,6 @@ class Platform(JMSBaseModel):
# 账号有关的
su_enabled = models.BooleanField(default=False, verbose_name=_("Su enabled"))
su_method = models.CharField(max_length=32, blank=True, null=True, verbose_name=_("Su method"))
automation = models.OneToOneField(
PlatformAutomation, on_delete=models.CASCADE, related_name='platform',
blank=True, null=True, verbose_name=_("Automation")
)
custom_fields = models.JSONField(null=True, default=list, verbose_name=_("Custom fields"))
@property

View File

@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
#
import re
from django.db.models import F
from django.db.transaction import atomic
from django.utils.translation import ugettext_lazy as _
@ -10,9 +8,12 @@ from rest_framework import serializers
from accounts.models import Account
from accounts.serializers import AccountSerializer
from common.serializers import WritableNestedModelSerializer, SecretReadableMixin, CommonModelSerializer, \
MethodSerializer
from common.serializers.dynamic import create_serializer_class
from common.const import UUID_PATTERN
from common.serializers import (
WritableNestedModelSerializer, SecretReadableMixin,
CommonModelSerializer, MethodSerializer
)
from common.serializers.common import DictSerializer
from common.serializers.fields import LabeledChoiceField
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ...const import Category, AllTypes
@ -25,8 +26,6 @@ __all__ = [
'AccountSecretSerializer', 'AssetProtocolsPermsSerializer'
]
uuid_pattern = re.compile(r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}')
class AssetProtocolsSerializer(serializers.ModelSerializer):
port = serializers.IntegerField(required=False, allow_null=True, max_value=65535, min_value=1)
@ -72,9 +71,13 @@ class AssetPlatformSerializer(serializers.ModelSerializer):
class AssetAccountSerializer(AccountSerializer):
add_org_fields = False
asset = serializers.PrimaryKeyRelatedField(queryset=Asset.objects, required=False, write_only=True)
clone_id: str
clone_id = None
def to_internal_value(self, data):
# When importing, data is sometimes a str
if isinstance(data, str):
return super().to_internal_value(data)
clone_id = data.pop('id', None)
ret = super().to_internal_value(data)
self.clone_id = clone_id
@ -121,12 +124,11 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Account'))
nodes_display = serializers.ListField(read_only=False, required=False, label=_("Node path"))
custom_info = MethodSerializer(label=_('Custom info'))
class Meta:
model = Asset
fields_mini = ['id', 'name', 'address']
fields_small = fields_mini + ['custom_info', 'is_active', 'comment']
fields_small = fields_mini + ['is_active', 'comment']
fields_fk = ['domain', 'platform']
fields_m2m = [
'nodes', 'labels', 'protocols',
@ -176,8 +178,12 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
return
category = request.path.strip('/').split('/')[-1].rstrip('s')
field_category = self.fields.get('category')
if not field_category:
return
field_category.choices = Category.filter_choices(category)
field_type = self.fields.get('type')
if not field_type:
return
field_type.choices = AllTypes.filter_choices(category)
@classmethod
@ -189,36 +195,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
.annotate(type=F("platform__type"))
return queryset
def get_custom_info_serializer(self):
request = self.context.get('request')
default_field = serializers.DictField(required=False, label=_('Custom info'))
if not request:
return default_field
if self.instance and isinstance(self.instance, list):
return default_field
if not self.instance and uuid_pattern.findall(request.path):
pk = uuid_pattern.findall(request.path)[0]
self.instance = Asset.objects.filter(id=pk).first()
platform = None
if self.instance:
platform = self.instance.platform
elif request.query_params.get('platform'):
platform_id = request.query_params.get('platform')
platform_id = int(platform_id) if platform_id.isdigit() else 0
platform = Platform.objects.filter(id=platform_id).first()
if not platform:
return default_field
custom_fields = platform.custom_fields
if not custom_fields:
return default_field
name = platform.name.title() + 'CustomSerializer'
return create_serializer_class(name, custom_fields)()
@staticmethod
def perform_nodes_display_create(instance, nodes_display):
if not nodes_display:
@ -263,12 +239,13 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
nodes_display = self.initial_data.get('nodes_display')
if nodes_display:
return nodes
default_node = Node.org_root()
request = self.context.get('request')
if not request:
return []
return [default_node]
node_id = request.query_params.get('node_id')
if not node_id:
return []
return [default_node]
nodes = Node.objects.filter(id=node_id)
return nodes
@ -331,8 +308,8 @@ class DetailMixin(serializers.Serializer):
def get_instance(self):
request = self.context.get('request')
if not self.instance and uuid_pattern.findall(request.path):
pk = uuid_pattern.findall(request.path)[0]
if not self.instance and UUID_PATTERN.findall(request.path):
pk = UUID_PATTERN.findall(request.path)[0]
self.instance = Asset.objects.filter(id=pk).first()
return self.instance
@ -350,19 +327,19 @@ class DetailMixin(serializers.Serializer):
category = request.query_params.get('category')
else:
instance = self.get_instance()
category = instance.category
category = instance.category if instance else 'host'
return category
def get_gathered_info_serializer(self):
category = self.get_category()
from .info.gathered import category_gathered_serializer_map
serializer_cls = category_gathered_serializer_map.get(category, serializers.DictField)
serializer_cls = category_gathered_serializer_map.get(category, DictSerializer)
return serializer_cls()
def get_spec_info_serializer(self):
category = self.get_category()
from .info.spec import category_spec_serializer_map
serializer_cls = category_spec_serializer_map.get(category, serializers.DictField)
serializer_cls = category_spec_serializer_map.get(category, DictSerializer)
return serializer_cls()

View File

@ -1,9 +1,48 @@
from assets.models import Custom
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _
from assets.models import Custom, Platform, Asset
from common.const import UUID_PATTERN
from common.serializers import MethodSerializer, create_serializer_class
from common.serializers.common import DictSerializer
from .common import AssetSerializer
__all__ = ['CustomSerializer']
class CustomSerializer(AssetSerializer):
custom_info = MethodSerializer(label=_('Custom info'))
class Meta(AssetSerializer.Meta):
model = Custom
fields = AssetSerializer.Meta.fields + ['custom_info']
def get_custom_info_serializer(self):
request = self.context.get('request')
default_field = DictSerializer()
if not request:
return default_field
if self.instance and isinstance(self.instance, (QuerySet, list)):
return default_field
if not self.instance and UUID_PATTERN.findall(request.path):
pk = UUID_PATTERN.findall(request.path)[0]
self.instance = Asset.objects.filter(id=pk).first()
platform = None
if self.instance:
platform = self.instance.platform
elif request.query_params.get('platform'):
platform_id = request.query_params.get('platform')
platform_id = int(platform_id) if platform_id.isdigit() else 0
platform = Platform.objects.filter(id=platform_id).first()
if not platform:
return default_field
custom_fields = platform.custom_fields
if not custom_fields:
return default_field
name = platform.name.title() + 'CustomSerializer'
return create_serializer_class(name, custom_fields)()

View File

@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
from urllib.parse import urlencode
from urllib.parse import urlencode, urlparse
from kubernetes import client
from kubernetes.client import api_client
from kubernetes.client.api import core_v1_api
from kubernetes.client.exceptions import ApiException
from sshtunnel import SSHTunnelForwarder, BaseSSHTunnelForwarderError
from common.utils import get_logger
from ..const import CloudTypes, Category
@ -13,16 +14,15 @@ logger = get_logger(__file__)
class KubernetesClient:
def __init__(self, url, token, proxy=None):
self.url = url
self.token = token
self.proxy = proxy
def __init__(self, asset, token):
self.url = asset.address
self.token = token or ''
self.server = self.get_gateway_server(asset)
@property
def api(self):
configuration = client.Configuration()
configuration.host = self.url
configuration.proxy = self.proxy
configuration.verify_ssl = False
configuration.api_key = {"authorization": "Bearer " + self.token}
c = api_client.ApiClient(configuration=configuration)
@ -51,27 +51,43 @@ class KubernetesClient:
return containers
@staticmethod
def get_proxy_url(asset):
if not asset.domain:
return None
def get_gateway_server(asset):
gateway = None
if not asset.is_gateway and asset.domain:
gateway = asset.domain.select_gateway()
gateway = asset.domain.select_gateway()
if not gateway:
return None
return f'{gateway.address}:{gateway.port}'
return
@classmethod
def run(cls, asset, secret, tp, *args):
k8s_url = f'{asset.address}'
proxy_url = cls.get_proxy_url(asset)
k8s = cls(k8s_url, secret, proxy=proxy_url)
remote_bind_address = (
urlparse(asset.address).hostname,
urlparse(asset.address).port
)
server = SSHTunnelForwarder(
(gateway.address, gateway.port),
ssh_username=gateway.username,
ssh_password=gateway.password,
ssh_pkey=gateway.private_key_path,
remote_bind_address=remote_bind_address
)
try:
server.start()
except BaseSSHTunnelForwarderError:
err_msg = 'Gateway is not active: %s' % asset.get('name', '')
print('\033[31m %s \033[0m\n' % err_msg)
return server
def run(self, tp, *args):
func_name = f'get_{tp}s'
data = []
if hasattr(k8s, func_name):
if hasattr(self, func_name):
try:
data = getattr(k8s, func_name)(*args)
data = getattr(self, func_name)(*args)
except ApiException as e:
logger.error(e.reason)
if self.server:
self.server.stop()
return data
@ -131,10 +147,11 @@ class KubernetesTree:
def async_tree_node(self, namespace, pod):
tree = []
k8s_client = KubernetesClient(self.asset, self.secret)
if pod:
tp = 'container'
containers = KubernetesClient.run(
self.asset, self.secret, tp, namespace, pod
containers = k8s_client.run(
tp, namespace, pod
)
for container in containers:
container_node = self.as_container_tree_node(
@ -143,13 +160,13 @@ class KubernetesTree:
tree.append(container_node)
elif namespace:
tp = 'pod'
pods = KubernetesClient.run(self.asset, self.secret, tp, namespace)
pods = k8s_client.run(tp, namespace)
for pod in pods:
pod_node = self.as_pod_tree_node(namespace, pod, tp)
tree.append(pod_node)
else:
tp = 'namespace'
namespaces = KubernetesClient.run(self.asset, self.secret, tp)
namespaces = k8s_client.run(tp)
for namespace in namespaces:
namespace_node = self.as_namespace_node(namespace, tp)
tree.append(namespace_node)
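
Note: KubernetesClient now reaches the cluster through an SSH tunnel opened against the asset's domain gateway (sshtunnel.SSHTunnelForwarder) instead of passing a proxy URL to the client configuration. The sketch below shows the general tunnel-then-connect pattern with purely hypothetical gateway and cluster values; it points the API client at the tunnel's local bind port and always stops the tunnel, and is not a drop-in for the class above.

# Sketch: forward the Kubernetes API through an SSH gateway (hypothetical values).
# Requires the `sshtunnel` and `kubernetes` packages used elsewhere in this diff.
from urllib.parse import urlparse

from kubernetes import client
from kubernetes.client import api_client
from kubernetes.client.api import core_v1_api
from sshtunnel import SSHTunnelForwarder

K8S_URL = 'https://10.0.0.10:6443'    # hypothetical cluster address
TOKEN = 'service-account-token'       # hypothetical token
parsed = urlparse(K8S_URL)

tunnel = SSHTunnelForwarder(
    ('gateway.example.com', 22),      # hypothetical gateway
    ssh_username='jms',
    ssh_password='secret',
    remote_bind_address=(parsed.hostname, parsed.port),
)
tunnel.start()
try:
    configuration = client.Configuration()
    # Talk to the cluster through the local end of the tunnel.
    configuration.host = f'{parsed.scheme}://127.0.0.1:{tunnel.local_bind_port}'
    configuration.verify_ssl = False
    configuration.api_key = {'authorization': 'Bearer ' + TOKEN}
    api = core_v1_api.CoreV1Api(api_client.ApiClient(configuration=configuration))
    print([ns.metadata.name for ns in api.list_namespace().items])
finally:
    tunnel.stop()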

View File

@ -1,13 +1,12 @@
# -*- coding: utf-8 -*-
#
from django.contrib.auth import logout as auth_logout
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import AllowAny
from common.utils import get_logger
from .. import errors, mixins
from django.contrib.auth import logout as auth_logout
__all__ = ['TicketStatusApi']
logger = get_logger(__name__)
@ -26,8 +25,10 @@ class TicketStatusApi(mixins.AuthMixin, APIView):
reason = e.msg
username = e.username
self.send_auth_signal(success=False, username=username, reason=reason)
auth_ticket_id = request.session.pop('auth_ticket_id', '')
# If this was a third-party login, the user should be logged out at this point
auth_logout(request)
request.session['auth_ticket_id'] = auth_ticket_id
return Response(e.as_data(), status=200)
except errors.NeedMoreInfoError as e:
return Response(e.as_data(), status=200)

View File

@ -119,7 +119,7 @@ class OAuth2Backend(JMSModelBackend):
headers = {
'Accept': 'application/json',
'Authorization': 'token {}'.format(response_data.get('access_token', ''))
'Authorization': 'Bearer {}'.format(response_data.get('access_token', ''))
}
logger.debug(log_prompt.format('Get userinfo endpoint'))

View File

@ -1,3 +1,5 @@
import re
from django.utils.translation import ugettext_lazy as _
create_success_msg = _("%(name)s was created successfully")
@ -9,3 +11,4 @@ KEY_CACHE_RESOURCE_IDS = "RESOURCE_IDS_{}"
# AD User AccountDisable
# https://docs.microsoft.com/en-us/troubleshoot/windows-server/identity/useraccountcontrol-manipulate-account-properties
LDAP_AD_ACCOUNT_DISABLE = 2
UUID_PATTERN = re.compile(r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}')
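
Note: the new module-level UUID_PATTERN replaces the uuid_pattern regex that the asset serializers defined locally to pull a primary key out of request.path (see DetailMixin.get_instance and CustomSerializer earlier in this diff). A quick standalone check of what it matches, against a hypothetical detail-view path:

# Quick check of UUID_PATTERN against a typical detail-view path (hypothetical URL).
import re

UUID_PATTERN = re.compile(r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}')

path = '/api/v1/assets/assets/2f4a99c9-7c0a-4b3d-8f2e-1a2b3c4d5e6f/'
print(UUID_PATTERN.findall(path))   # ['2f4a99c9-7c0a-4b3d-8f2e-1a2b3c4d5e6f']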

View File

@ -1,12 +1,12 @@
import json
import uuid
import logging
import uuid
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone as dj_timezone
from django.db import models
from django.conf import settings
from django.db import models
from django.utils import timezone as dj_timezone
from django.utils.translation import ugettext_lazy as _
lazy_type = type(_('ugettext_lazy'))
@ -28,5 +28,5 @@ class ModelJSONFieldEncoder(json.JSONEncoder):
try:
return super().default(obj)
except TypeError:
logging.error('Type error: ', type(obj))
logging.error(f'Type error: {type(obj)}')
return str(obj)

View File

@ -1,3 +1,5 @@
import logging
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from drf_writable_nested.serializers import WritableNestedModelSerializer as NestedModelSerializer
@ -35,7 +37,11 @@ class MethodSerializer(serializers.Serializer):
@cached_property
def serializer(self) -> serializers.Serializer:
method = getattr(self.parent, self.method_name)
_serializer = method()
try:
_serializer = method()
except Exception as e:
logging.error(e, exc_info=True)
raise e
# Set the serializer's parent, otherwise accessing parent from the serializer instance breaks the chain
setattr(_serializer, 'parent', self.parent)
return _serializer
@ -90,3 +96,17 @@ class WritableNestedModelSerializer(NestedModelSerializer):
class FileSerializer(serializers.Serializer):
file = serializers.FileField(label=_("File"))
class DictSerializer(serializers.Serializer):
def to_representation(self, instance):
# Return a Python dict containing all submitted fields
return instance
def to_internal_value(self, data):
# Make sure the input from the request is a Python dict
if isinstance(data, dict):
return data
else:
raise serializers.ValidationError("无法转换为dict类型")

View File

@ -2,6 +2,7 @@
#
import phonenumbers
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Model
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from rest_framework.fields import ChoiceField, empty
@ -63,7 +64,7 @@ class LabeledChoiceField(ChoiceField):
def to_representation(self, key):
if key is None:
return key
label = self.choice_mapper.get(key)
label = self.choice_mapper.get(key, key)
return {"value": key, "label": label}
def to_internal_value(self, data):
@ -118,11 +119,15 @@ class ObjectRelatedField(serializers.RelatedField):
return data
def to_internal_value(self, data):
queryset = self.get_queryset()
if isinstance(data, Model):
return queryset.get(pk=data.pk)
if not isinstance(data, dict):
pk = data
else:
pk = data.get("id") or data.get("pk") or data.get(self.attrs[0])
queryset = self.get_queryset()
try:
if isinstance(data, bool):
raise TypeError

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
#
import logging
import os
import re
from collections import defaultdict
@ -12,6 +13,7 @@ from django.dispatch import receiver
from jumpserver.utils import get_current_request
from .local import thread_local
from .signals import django_ready
pattern = re.compile(r'FROM `(\w+)`')
logger = logging.getLogger("jumpserver.common")
@ -123,3 +125,60 @@ if settings.DEBUG_DEV:
request_finished.connect(on_request_finished_logging_db_query)
else:
request_finished.connect(on_request_finished_release_local)
@receiver(django_ready)
def check_migrations_file_prefix_conflict(*args, **kwargs):
if not settings.DEBUG_DEV:
return
from jumpserver.const import BASE_DIR
print('>>> Check migrations file prefix conflict.', end=' ')
# App root directory
_dir = BASE_DIR
# Get all subdirectories
sub_dirs = next(os.walk(_dir))[1]
# Record conflicting files; each element is (subdir, file1, file2)
conflict_files = []
# Iterate over each subdirectory
for subdir in sub_dirs:
# Build the migrations directory path
migrations_dir = os.path.join(_dir, subdir, 'migrations')
# Check whether the migrations directory exists
if not os.path.exists(migrations_dir):
continue
# Get all file names
files = os.listdir(migrations_dir)
# Iterate over each file name
prefix_file_map = dict()
for file in files:
file = str(file)
# Check whether it is a Python file
if not file.endswith('.py'):
continue
if 'squashed' in file:
continue
# file is the file name
file_prefix = file.split('_')[0]
if file_prefix in prefix_file_map.keys():
conflict_files.append((subdir, file, prefix_file_map.get(file_prefix)))
else:
prefix_file_map[file_prefix] = file
conflict_count = len(conflict_files)
print(f'Conflict count:({conflict_count})')
if not conflict_count:
return
print('='*80)
for conflict_file in conflict_files:
msg_dir = '{:<15}'.format(conflict_file[0])
msg_split = '=> '
msg_left = msg_dir
msg_right1 = msg_split + '{:<80}'.format(conflict_file[1])
msg_right2 = ' ' * len(msg_left) + msg_split + conflict_file[2]
print(f'{msg_left}{msg_right1}\n{msg_right2}\n')
print('='*80)

apps/common/utils/yml.py (new file, 34 lines)
View File

@ -0,0 +1,34 @@
import io
import yaml
from django.conf import settings
from jinja2 import Environment
def translate(key, i18n):
lang = settings.LANGUAGE_CODE[:2]
lang_data = i18n.get(key, {})
return lang_data.get(lang, key)
def yaml_load_with_i18n(stream):
ori_text = stream.read()
stream = io.StringIO(ori_text)
yaml_data = yaml.safe_load(stream)
i18n = yaml_data.get('i18n', {})
env = Environment()
env.filters['trans'] = lambda key: translate(key, i18n)
template = env.from_string(ori_text)
yaml_data = template.render()
yaml_f = io.StringIO(yaml_data)
d = yaml.safe_load(yaml_f)
if isinstance(d, dict):
d.pop('i18n', None)
return d
if __name__ == '__main__':
with open('manifest.yml') as f:
data = yaml_load_with_i18n(f)
print(data)
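
Note: yaml_load_with_i18n is what lets the automation and applet manifests in this PR declare name: "{{ 'SSH account change secret' | trans }}" and ship their translations in an i18n block. The sketch below mirrors the same mechanism in a self-contained form, with the language hard-coded instead of read from settings.LANGUAGE_CODE:

# Self-contained mirror of the manifest i18n mechanism (language hard-coded to 'zh').
import io

import yaml
from jinja2 import Environment

MANIFEST = """
id: change_secret_by_ssh
name: "{{ 'SSH account change secret' | trans }}"
i18n:
  SSH account change secret:
    zh: SSH 账号改密
    ja: SSH アカウントのパスワード変更
"""

def yaml_load_with_i18n_demo(text, lang='zh'):
    i18n = yaml.safe_load(io.StringIO(text)).get('i18n', {})
    env = Environment()
    env.filters['trans'] = lambda key: i18n.get(key, {}).get(lang, key)
    data = yaml.safe_load(io.StringIO(env.from_string(text).render()))
    data.pop('i18n', None)
    return data

print(yaml_load_with_i18n_demo(MANIFEST))
# {'id': 'change_secret_by_ssh', 'name': 'SSH 账号改密'}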

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:975e9e264596ef5f7233fc1d2fb45281a5fe13f5a722fc2b9d5c40562ada069d
size 138303
oid sha256:a4ef9ccfeccf8f45c8753bc901ff6efe970486565c3bcf2d46042657ffa49f42
size 139618

File diff suppressed because it is too large

View File

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:035f9429613b541f229855a7d36c98e5f4736efce54dcd21119660dd6d89d94e
size 114269
oid sha256:2dd0610d610c2660f35d50dc2871ac08cc09080d2503e1080a57d97c47fea471
size 114418

File diff suppressed because it is too large

View File

@ -72,9 +72,10 @@ def get_commands(module):
username = module.params['name']
password = module.params['password']
commands = module.params['commands'] or []
login_password = module.params['login_password']
for index, command in enumerate(commands):
commands[index] = command.format(
username=username, password=password
username=username, password=password, login_password=login_password
)
return commands

View File

@ -402,9 +402,11 @@ class JobExecution(JMSOrgBaseModel):
def check_command_acl(self):
for asset in self.current_job.assets.all():
acls = CommandFilterACL.filter_queryset(user=self.creator,
asset=asset,
account_username=self.current_job.runas)
acls = CommandFilterACL.filter_queryset(
user=self.creator,
asset=asset,
is_active=True,
account_username=self.current_job.runas)
for acl in acls:
if self.match_command_group(acl, asset):
break

View File

@ -4,6 +4,7 @@ from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from assets.models import Node, Asset
from perms.models import PermNode
from perms.utils.user_perm import UserPermAssetUtil
from common.serializers.fields import ReadableHiddenField
from ops.mixin import PeriodTaskSerializerMixin
@ -39,7 +40,12 @@ class JobSerializer(BulkOrgResourceModelSerializer, PeriodTaskSerializerMixin):
user = self.get_request_user()
perm_util = UserPermAssetUtil(user=user)
for node_id in node_ids:
node, node_assets = perm_util.get_node_all_assets(node_id)
if node_id == PermNode.FAVORITE_NODE_KEY:
node_assets = perm_util.get_favorite_assets()
elif node_id == PermNode.UNGROUPED_NODE_KEY:
node_assets = perm_util.get_ungroup_assets()
else:
node, node_assets = perm_util.get_node_all_assets(node_id)
assets.extend(node_assets.exclude(id__in=[asset.id for asset in assets]))
return super().create(validated_data)

View File

@ -146,6 +146,8 @@ only_system_permissions = (
('authentication', '*', '*', '*'),
('tickets', '*', '*', '*'),
('orgs', 'organization', 'view', 'rootorg'),
('terminal', 'applet', '*', '*'),
('terminal', 'applethost', '*', '*'),
)
only_org_permissions = (

View File

@ -1,4 +0,0 @@
- zh:
display_name: Chrome 浏览器
comment: 浏览器打开 URL 页面地址

View File

@ -1,7 +1,7 @@
name: chrome
display_name: Chrome Browser
display_name: "{{ 'Chrome Browser' | trans }}"
version: 0.2
comment: Chrome Browser Open URL Page Address
comment: "{{ 'Chrome Browser Open URL Page Address' | trans }}"
author: JumpServer Team
exec_type: python
update_policy: always
@ -10,3 +10,13 @@ tags:
- web
protocols:
- http
i18n:
Chrome Browser:
en: Chrome Browser
zh: Chrome 浏览器
ja: Chrome ブラウザ
Chrome Browser Open URL Page Address:
en: Chrome Browser Open URL Page Address
zh: Chrome 浏览器打开网页地址
ja: Chrome ブラウザでウェブページを開く

View File

@ -1,3 +0,0 @@
- zh:
display_name: DBeaver Community
comment: 免费的多平台数据库工具,供开发人员、数据库管理员、分析师和所有需要使用数据库的人使用。

View File

@ -1,6 +1,6 @@
name: dbeaver
display_name: DBeaver Community
comment: Free multi-platform database tool for developers, database administrators, analysts and all people who need to work with databases.
display_name: "{{ 'DBeaver Community' | trans }}"
comment: "{{ 'Free multi-platform database tool for developers, database administrators, analysts and all people who need to work with databases.' | trans }}"
version: 0.1
exec_type: python
author: JumpServer Team
@ -14,3 +14,14 @@ protocols:
- postgresql
- sqlserver
- oracle
i18n:
DBeaver Community:
en: DBeaver Community
zh: DBeaver 社区版
ja: DBeaver コミュニティ
Free multi-platform database tool for developers, database administrators, analysts and all people who need to work with databases.:
en: Free multi-platform database tool for developers, database administrators, analysts and all people who need to work with databases.
zh: 免费的多平台数据库工具,供开发人员、数据库管理员、分析师和所有需要使用数据库的人使用。
ja: 開発者、データベース管理者、分析家、およびデータベースを使用する必要があるすべての人のための無料のマルチプラットフォームデータベースツール。

View File

@ -12,6 +12,7 @@ from rest_framework.serializers import ValidationError
from common.db.models import JMSBaseModel
from common.utils import lazyproperty, get_logger
from common.utils.yml import yaml_load_with_i18n
logger = get_logger(__name__)
@ -76,14 +77,14 @@ class Applet(JMSBaseModel):
@staticmethod
def validate_pkg(d):
files = ['manifest.yml', 'icon.png', 'i18n.yml', 'setup.yml']
files = ['manifest.yml', 'icon.png', 'setup.yml']
for name in files:
path = os.path.join(d, name)
if not os.path.exists(path):
raise ValidationError({'error': _('Applet pkg not valid, Missing file {}').format(name)})
with open(os.path.join(d, 'manifest.yml')) as f:
manifest = yaml.safe_load(f)
manifest = yaml_load_with_i18n(f)
if not manifest.get('name', ''):
raise ValidationError({'error': 'Missing name in manifest.yml'})

View File

@ -3,16 +3,16 @@ from urllib.parse import urljoin
from django.conf import settings
from django.core.cache import cache
from django.shortcuts import reverse
from django.forms import model_to_dict
from django.shortcuts import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from notifications.notifications import UserMessage
from common.utils import get_logger, random_string
from common.db.encoder import ModelJSONFieldEncoder
from .models import Ticket
from common.utils import get_logger, random_string
from notifications.notifications import UserMessage
from . import const
from .models import Ticket
logger = get_logger(__file__)

utils/diff_internal.py (new file, 52 lines)
View File

@ -0,0 +1,52 @@
import difflib
import json
import sys
def diff_list(f1, f2):
with open(f1) as f:
data1 = json.load(f)
data1_mapper = {
d['name']: d for d in data1
}
with open(f2) as f:
data2 = json.load(f)
data2_mapper = {
d['name']: d for d in data2
}
d1_names = set(data1_mapper.keys())
d2_names = set(data2_mapper.keys())
diff_names = d1_names - d2_names
if diff_names:
print("Diff Names1: ", diff_names)
diff_names = d2_names - d1_names
if diff_names:
print("Diff Names2: ", diff_names)
for name, data in data1_mapper.items():
if name not in data2_mapper:
continue
data2 = data2_mapper[name]
print("Diff: ", name)
diff = difflib.unified_diff(
json.dumps(data, indent=4, sort_keys=True).splitlines(),
json.dumps(data2, indent=4, sort_keys=True).splitlines()
)
print('\n'.join(diff))
print()
if __name__ == '__main__':
if len(sys.argv) != 3:
print('Usage: python diff.py file1 file2')
sys.exit(1)
f1 = sys.argv[1]
f2 = sys.argv[2]
diff = diff_list(f1, f2)