mirror of https://github.com/jumpserver/jumpserver
perf: 处理冲突 (perf: resolve merge conflicts)
commit 49c78f65a6
@@ -16,7 +16,10 @@ class PushOrVerifyHostCallbackMixin:
     generate_private_key_path: callable

     def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
-        host = super().host_callback(host, asset=asset, account=account, automation=automation, **kwargs)
+        host = super().host_callback(
+            host, asset=asset, account=account, automation=automation,
+            path_dir=path_dir, **kwargs
+        )
         if host.get('error'):
             return host

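For reference, the point of this refactor is that `host_callback` now forwards `path_dir` (the playbook's per-run private directory) to the base manager, which — as the BasePlaybookManager hunk later in this commit shows — uses it in `convert_cert_to_file` to write `secret_info` certificates to disk before the inventory is rendered. A minimal sketch of the calling pattern, using a hypothetical subclass name and assuming only the signatures visible in this diff:

    # Hypothetical illustration; DemoPushManager is not part of the project.
    class DemoPushManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):
        def host_callback(self, host, asset=None, account=None,
                          automation=None, path_dir=None, **kwargs):
            # Forward path_dir so the base manager can materialize cert values from
            # host['jms_asset']['secret_info'] as files under <path_dir>/certs.
            host = super().host_callback(
                host, asset=asset, account=account, automation=automation,
                path_dir=path_dir, **kwargs
            )
            if host.get('error'):      # e.g. '<method> disabled' set by the base manager
                return host
            host['demo_extra'] = True  # hypothetical subclass-specific enrichment
            return host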
@@ -10,7 +10,12 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
+    ssl: "{{ jms_asset.spec_info.use_ssl }}"
+    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
+    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
+    connection_options:
+      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
   register: db_info

 - name: Display MongoDB version
@@ -24,8 +29,13 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
-    db: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
+    ssl: "{{ jms_asset.spec_info.use_ssl }}"
+    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
+    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
+    connection_options:
+      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
+    db: "{{ jms_asset.spec_info.db_name }}"
     name: "{{ account.username }}"
     password: "{{ account.secret }}"
   when: db_info is succeeded
@@ -37,7 +47,12 @@
     login_password: "{{ account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
+    ssl: "{{ jms_asset.spec_info.use_ssl }}"
+    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
+    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
+    connection_options:
+      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
   when:
     - db_info is succeeded
     - change_info is succeeded
@@ -10,7 +10,7 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
     mode: "{{ jms_account.mode }}"
   register: db_info

@@ -25,7 +25,7 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
     mode: "{{ jms_account.mode }}"
     name: "{{ account.username }}"
     password: "{{ account.secret }}"
@@ -38,8 +38,7 @@
     login_password: "{{ account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
-    mode: "{{ account.mode }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
   when:
     - db_info is succeeded
     - change_info is succeeded
@@ -10,7 +10,7 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_db: "{{ jms_asset.specific.db_name }}"
+    login_db: "{{ jms_asset.spec_info.db_name }}"
   register: db_info

 - name: Display PostgreSQL version
@@ -24,7 +24,7 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    db: "{{ jms_asset.specific.db_name }}"
+    db: "{{ jms_asset.spec_info.db_name }}"
     name: "{{ account.username }}"
     password: "{{ account.secret }}"
   when: db_info is succeeded
@@ -36,7 +36,7 @@
     login_password: "{{ account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    db: "{{ jms_asset.specific.db_name }}"
+    db: "{{ jms_asset.spec_info.db_name }}"
   when:
     - db_info is succeeded
    - change_info is succeeded
@@ -10,38 +10,38 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    name: '{{ jms_asset.specific.db_name }}'
+    name: '{{ jms_asset.spec_info.db_name }}'
     script: |
       SELECT @@version
   register: db_info

 - name: SQLServer version
   set_fact:
     info:
       version: "{{ db_info.query_results[0][0][0][0].splitlines()[0] }}"
 - debug:
     var: info

 - name: Change SQLServer password
   community.general.mssql_script:
     login_user: "{{ jms_account.username }}"
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    name: '{{ jms_asset.specific.db_name }}'
+    name: '{{ jms_asset.spec_info.db_name }}'
     script: "ALTER LOGIN {{ account.username }} WITH PASSWORD = '{{ account.secret }}'; select @@version"
   when: db_info is succeeded
   register: change_info

 - name: Verify password
   community.general.mssql_script:
     login_user: "{{ account.username }}"
     login_password: "{{ account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    name: '{{ jms_asset.specific.db_name }}'
+    name: '{{ jms_asset.spec_info.db_name }}'
     script: |
       SELECT @@version
   when:
     - db_info is succeeded
     - change_info is succeeded
@@ -70,8 +70,14 @@ class ChangeSecretManager(AccountBasePlaybookManager):
         else:
             return self.secret_generator.get_secret()

-    def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
-        host = super().host_callback(host, asset=asset, account=account, automation=automation, **kwargs)
+    def host_callback(
+            self, host, asset=None, account=None,
+            automation=None, path_dir=None, **kwargs
+    ):
+        host = super().host_callback(
+            host, asset=asset, account=account, automation=automation,
+            path_dir=path_dir, **kwargs
+        )
         if host.get('error'):
             return host

@@ -10,7 +10,12 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
+    ssl: "{{ jms_asset.spec_info.use_ssl }}"
+    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
+    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
+    connection_options:
+      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
     filter: users
   register: db_info

@@ -10,7 +10,7 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
     mode: "{{ jms_account.mode }}"
     filter: users
   register: db_info
@@ -10,7 +10,7 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_db: "{{ jms_asset.specific.db_name }}"
+    login_db: "{{ jms_asset.spec_info.db_name }}"
     filter: "roles"
   register: db_info

@@ -2,8 +2,7 @@
   gather_facts: no
   tasks:
     - name: Gather posix account
-      ansible.builtin.win_shell:
-        cmd: net user
+      ansible.builtin.win_shell: net user
      register: result

    - name: Define info by set_fact
@@ -53,7 +53,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
         info = result.get('debug', {}).get('res', {}).get('info', {})
         asset = self.host_asset_mapper.get(host)
         if asset and info:
-            result = self.filter_success_result(host, info)
+            result = self.filter_success_result(asset.type, info)
             self.bulk_create_accounts(asset, result)
         else:
             logger.error("Not found info".format(host))
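A note on the `filter_success_result(asset.type, info)` change above: the gathered account info is now filtered by asset type rather than by the ansible host name. The project's real filter is not shown in this hunk, so the following is only a hypothetical sketch of what a type-keyed filter could look like; the field names and info layout are assumptions:

    # Hypothetical sketch, not the project's actual implementation.
    def filter_success_result(tp: str, info: dict) -> dict:
        if tp == 'windows':
            # e.g. parse the 'net user' output gathered by win_shell
            users = info.get('user_info', [])
        else:
            users = info.get('users', [])
        return {'accounts': users}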
@@ -10,4 +10,9 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
+    ssl: "{{ jms_asset.spec_info.use_ssl }}"
+    ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
+    ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
+    connection_options:
+      - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
@@ -10,5 +10,5 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    login_database: "{{ jms_asset.specific.db_name }}"
+    login_database: "{{ jms_asset.spec_info.db_name }}"
     mode: "{{ jms_account.mode }}"
@@ -10,4 +10,4 @@
     login_password: "{{ account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    db: "{{ jms_asset.specific.db_name }}"
+    db: "{{ jms_asset.spec_info.db_name }}"
@@ -10,6 +10,6 @@
     login_password: "{{ jms_account.secret }}"
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
-    name: '{{ jms_asset.specific.db_name }}'
+    name: '{{ jms_asset.spec_info.db_name }}'
     script: |
       SELECT @@version
@@ -1,5 +1,5 @@
 - hosts: windows
-  gather_facts: yes
+  gather_facts: no
   tasks:
     - name: Verify account
      ansible.windows.win_ping:
@@ -1,7 +1,7 @@
 from django.db.models import QuerySet

-from common.utils import get_logger
 from accounts.const import AutomationTypes, Connectivity
+from common.utils import get_logger
 from ..base.manager import PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager

 logger = get_logger(__name__)
@@ -29,4 +29,4 @@ class VerifyAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookMan

     def on_host_error(self, host, error, result):
         account = self.host_account_mapper.get(host)
-        account.set_connectivity(Connectivity.FAILED)
+        account.set_connectivity(Connectivity.ERR)
@@ -29,8 +29,7 @@ class Migration(migrations.Migration):
                 ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                 ('org_id',
                  models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
-                ('connectivity', models.CharField(choices=[('unknown', 'Unknown'), ('ok', 'Ok'), ('failed', 'Failed')],
-                                                  default='unknown', max_length=16, verbose_name='Connectivity')),
+                ('connectivity', models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity')),
                 ('date_verified', models.DateTimeField(null=True, verbose_name='Date verified')),
                 ('name', models.CharField(max_length=128, verbose_name='Name')),
                 ('username', models.CharField(blank=True, db_index=True, max_length=128, verbose_name='Username')),
@@ -50,7 +50,7 @@ class BaseAccount(JMSOrgBaseModel):
         return bool(self.username)

     @property
-    def specific(self):
+    def spec_info(self):
         data = {}
         if self.secret_type != SecretType.SSH_KEY:
             return data
@@ -92,6 +92,9 @@ class BaseAccount(JMSOrgBaseModel):
         else:
             return ''

+        if not public_key:
+            return ''
+
         public_key_obj = sshpubkeys.SSHKey(public_key)
         fingerprint = public_key_obj.hash_md5()
         return fingerprint
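The `if not public_key` guard added above keeps an empty key from reaching the parser. A small self-contained sketch of the same fingerprint logic, assuming the `sshpubkeys` package the model already imports; the helper name is illustrative only:

    import sshpubkeys

    def ssh_key_fingerprint(public_key: str) -> str:
        # Mirrors BaseAccount: empty keys short-circuit to '' instead of raising.
        if not public_key:
            return ''
        key = sshpubkeys.SSHKey(public_key)
        return key.hash_md5()  # e.g. 'MD5:1a:2b:...'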
@ -5,72 +5,68 @@ from assets.models import Asset
|
|||
from accounts.const import SecretType, Source
|
||||
from accounts.models import Account, AccountTemplate
|
||||
from accounts.tasks import push_accounts_to_assets
|
||||
from assets.const import Category, AllTypes
|
||||
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
|
||||
from common.serializers import SecretReadableMixin, BulkModelSerializer
|
||||
from .base import BaseAccountSerializer
|
||||
|
||||
|
||||
class AccountSerializerCreateValidateMixin:
|
||||
replace_attrs: callable
|
||||
id: str
|
||||
template: bool
|
||||
push_now: bool
|
||||
replace_attrs: callable
|
||||
|
||||
def validate(self, attrs):
|
||||
_id = attrs.pop('id', None)
|
||||
if _id:
|
||||
def to_internal_value(self, data):
|
||||
_id = data.pop('id', None)
|
||||
ret = super().to_internal_value(data)
|
||||
self.id = _id
|
||||
return ret
|
||||
|
||||
def set_secret(self, attrs):
|
||||
_id = self.id
|
||||
template = attrs.pop('template', None)
|
||||
|
||||
if _id and template:
|
||||
account_template = AccountTemplate.objects.get(id=_id)
|
||||
attrs['secret'] = account_template.secret
|
||||
account_template = attrs.pop('template', None)
|
||||
if account_template:
|
||||
self.replace_attrs(account_template, attrs)
|
||||
self.push_now = attrs.pop('push_now', False)
|
||||
return super().validate(attrs)
|
||||
elif _id and not template:
|
||||
account = Account.objects.get(id=_id)
|
||||
attrs['secret'] = account.secret
|
||||
return attrs
|
||||
|
||||
def validate(self, attrs):
|
||||
attrs = super().validate(attrs)
|
||||
return self.set_secret(attrs)
|
||||
|
||||
def create(self, validated_data):
|
||||
push_now = validated_data.pop('push_now', None)
|
||||
instance = super().create(validated_data)
|
||||
if push_now:
|
||||
push_accounts_to_assets.delay([instance.id], [instance.asset_id])
|
||||
return instance
|
||||
|
||||
|
||||
class AccountSerializerCreateMixin(
|
||||
AccountSerializerCreateValidateMixin, BulkModelSerializer
|
||||
):
|
||||
template = serializers.UUIDField(
|
||||
required=False, allow_null=True, write_only=True,
|
||||
label=_('Account template')
|
||||
template = serializers.BooleanField(
|
||||
default=False, label=_("Template"), write_only=True
|
||||
)
|
||||
push_now = serializers.BooleanField(
|
||||
default=False, label=_("Push now"), write_only=True
|
||||
)
|
||||
has_secret = serializers.BooleanField(label=_("Has secret"), read_only=True)
|
||||
|
||||
@staticmethod
|
||||
def validate_template(value):
|
||||
try:
|
||||
return AccountTemplate.objects.get(id=value)
|
||||
except AccountTemplate.DoesNotExist:
|
||||
raise serializers.ValidationError(_('Account template not found'))
|
||||
|
||||
@staticmethod
|
||||
def replace_attrs(account_template: AccountTemplate, attrs: dict):
|
||||
exclude_fields = [
|
||||
'_state', 'org_id', 'id', 'date_created',
|
||||
'date_updated'
|
||||
]
|
||||
template_attrs = {
|
||||
k: v for k, v in account_template.__dict__.items()
|
||||
if k not in exclude_fields
|
||||
}
|
||||
for k, v in template_attrs.items():
|
||||
attrs.setdefault(k, v)
|
||||
|
||||
def create(self, validated_data):
|
||||
instance = super().create(validated_data)
|
||||
if self.push_now:
|
||||
push_accounts_to_assets.delay([instance.id], [instance.asset_id])
|
||||
return instance
|
||||
|
||||
|
||||
class AccountAssetSerializer(serializers.ModelSerializer):
|
||||
platform = ObjectRelatedField(read_only=True)
|
||||
category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
|
||||
type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
fields = ['id', 'name', 'address', 'platform']
|
||||
fields = ['id', 'name', 'address', 'type', 'category', 'platform']
|
||||
|
||||
def to_internal_value(self, data):
|
||||
if isinstance(data, dict):
|
||||
|
|
|
@ -36,7 +36,7 @@ class AccountBackupSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSer
|
|||
|
||||
|
||||
class AccountBackupPlanExecutionSerializer(serializers.ModelSerializer):
|
||||
trigger = LabeledChoiceField(choices=Trigger.choices, label=_("Trigger mode"))
|
||||
trigger = LabeledChoiceField(choices=Trigger.choices, label=_("Trigger mode"), read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = AccountBackupExecution
|
||||
|
|
|
@ -16,7 +16,7 @@ class AuthValidateMixin(serializers.Serializer):
|
|||
choices=SecretType.choices, required=True, label=_('Secret type')
|
||||
)
|
||||
secret = EncryptedField(
|
||||
label=_('Secret'), required=False, max_length=40960, allow_blank=True,
|
||||
label=_('Secret/Password'), required=False, max_length=40960, allow_blank=True,
|
||||
allow_null=True, write_only=True,
|
||||
)
|
||||
passphrase = serializers.CharField(
|
||||
|
@ -68,14 +68,14 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
|
|||
fields_mini = ['id', 'name', 'username']
|
||||
fields_small = fields_mini + [
|
||||
'secret_type', 'secret', 'has_secret', 'passphrase',
|
||||
'privileged', 'is_active', 'specific',
|
||||
'privileged', 'is_active', 'spec_info',
|
||||
]
|
||||
fields_other = ['created_by', 'date_created', 'date_updated', 'comment']
|
||||
fields = fields_small + fields_other
|
||||
read_only_fields = [
|
||||
'has_secret', 'specific',
|
||||
'has_secret', 'spec_info',
|
||||
'date_verified', 'created_by', 'date_created',
|
||||
]
|
||||
extra_kwargs = {
|
||||
'specific': {'label': _('Specific')},
|
||||
'spec_info': {'label': _('Spec info')},
|
||||
}
|
||||
|
|
|
@ -20,14 +20,14 @@ class Migration(migrations.Migration):
|
|||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='commandfilteracl',
|
||||
options={'ordering': ('priority', 'name'), 'verbose_name': 'Command acl'},
|
||||
options={'ordering': ('priority', 'date_updated', 'name'), 'verbose_name': 'Command acl'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='loginacl',
|
||||
options={'ordering': ('priority', 'name'), 'verbose_name': 'Login acl'},
|
||||
options={'ordering': ('priority', 'date_updated', 'name'), 'verbose_name': 'Login acl'},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='loginassetacl',
|
||||
options={'ordering': ('priority', 'name'), 'verbose_name': 'Login asset acl'},
|
||||
options={'ordering': ('priority', 'date_updated', 'name'), 'verbose_name': 'Login asset acl'},
|
||||
),
|
||||
]
|
||||
|
|
|
@ -5,7 +5,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
|
||||
from common.db.models import JMSBaseModel
|
||||
from common.utils import contains_ip
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
|
||||
__all__ = [
|
||||
'ACLManager',
|
||||
|
@ -67,6 +67,10 @@ class ACLManager(models.Manager):
|
|||
return self.get_queryset().valid()
|
||||
|
||||
|
||||
class OrgACLManager(OrgManager, ACLManager):
|
||||
pass
|
||||
|
||||
|
||||
class BaseACL(JMSBaseModel):
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
priority = models.IntegerField(
|
||||
|
@ -82,7 +86,7 @@ class BaseACL(JMSBaseModel):
|
|||
objects = ACLManager.from_queryset(BaseACLQuerySet)()
|
||||
|
||||
class Meta:
|
||||
ordering = ('priority', 'name')
|
||||
ordering = ('priority', 'date_updated', 'name')
|
||||
abstract = True
|
||||
|
||||
def is_action(self, action):
|
||||
|
@ -97,7 +101,7 @@ class UserAssetAccountBaseACL(BaseACL, OrgModelMixin):
|
|||
# username_group
|
||||
accounts = models.JSONField(verbose_name=_('Account'))
|
||||
|
||||
objects = ACLManager.from_queryset(UserAssetAccountACLQuerySet)()
|
||||
objects = OrgACLManager.from_queryset(UserAssetAccountACLQuerySet)()
|
||||
|
||||
class Meta(BaseACL.Meta):
|
||||
unique_together = ('name', 'org_id')
|
||||
|
|
|
@ -52,10 +52,10 @@ class LoginACLSerializer(BulkModelSerializer):
|
|||
action = self.fields.get("action")
|
||||
if not action:
|
||||
return
|
||||
choices = action._choices
|
||||
choices = action.choices
|
||||
if not has_valid_xpack_license():
|
||||
choices.pop(LoginACL.ActionChoices.review, None)
|
||||
action._choices = choices
|
||||
action.choices = choices
|
||||
|
||||
def get_rules_serializer(self):
|
||||
return RuleSerializer()
|
||||
|
|
|
@ -30,10 +30,31 @@ __all__ = [
|
|||
|
||||
|
||||
class AssetFilterSet(BaseFilterSet):
|
||||
labels = django_filters.CharFilter(method='filter_labels')
|
||||
platform = django_filters.CharFilter(method='filter_platform')
|
||||
type = django_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
|
||||
category = django_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
|
||||
platform = django_filters.CharFilter(method='filter_platform')
|
||||
labels = django_filters.CharFilter(method='filter_labels')
|
||||
domain_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__domain_enabled", lookup_expr="exact"
|
||||
)
|
||||
ping_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__ping_enabled", lookup_expr="exact"
|
||||
)
|
||||
gather_facts_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__gather_facts_enabled", lookup_expr="exact"
|
||||
)
|
||||
change_secret_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__change_secret_enabled", lookup_expr="exact"
|
||||
)
|
||||
push_account_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__push_account_enabled", lookup_expr="exact"
|
||||
)
|
||||
verify_account_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__verify_account_enabled", lookup_expr="exact"
|
||||
)
|
||||
gather_accounts_enabled = django_filters.BooleanFilter(
|
||||
field_name="platform__automation__gather_accounts_enabled", lookup_expr="exact"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
|
@ -73,11 +94,13 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
("platform", serializers.PlatformSerializer),
|
||||
("suggestion", serializers.MiniAssetSerializer),
|
||||
("gateways", serializers.GatewaySerializer),
|
||||
("spec_info", serializers.SpecSerializer)
|
||||
)
|
||||
rbac_perms = (
|
||||
("match", "assets.match_asset"),
|
||||
("platform", "assets.view_platform"),
|
||||
("gateways", "assets.view_gateway"),
|
||||
("spec_info", "assets.view_asset"),
|
||||
)
|
||||
extra_filter_backends = [LabelFilterBackend, IpInFilterBackend, NodeFilterBackend]
|
||||
|
||||
|
@ -95,6 +118,11 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
serializer = super().get_serializer(instance=asset.platform)
|
||||
return Response(serializer.data)
|
||||
|
||||
@action(methods=["GET"], detail=True, url_path="spec-info")
|
||||
def spec_info(self, *args, **kwargs):
|
||||
asset = super().get_object()
|
||||
return Response(asset.spec_info)
|
||||
|
||||
@action(methods=["GET"], detail=True, url_path="gateways")
|
||||
def gateways(self, *args, **kwargs):
|
||||
asset = self.get_object()
|
||||
|
@ -104,6 +132,11 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
|||
gateways = asset.domain.gateways
|
||||
return self.get_paginated_response_from_queryset(gateways)
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
if request.path.find('/api/v1/assets/assets/') > -1:
|
||||
return Response({'error': _('Cannot create asset directly, you should create a host or other')}, status=400)
|
||||
return super().create(request, *args, **kwargs)
|
||||
|
||||
|
||||
class AssetsTaskMixin:
|
||||
request: Request
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
from assets.models import Host, Asset
|
||||
from assets.serializers import HostSerializer
|
||||
from assets.serializers import HostSerializer, HostInfoSerializer
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from .asset import AssetViewSet
|
||||
|
||||
__all__ = ['HostViewSet']
|
||||
|
@ -12,4 +15,11 @@ class HostViewSet(AssetViewSet):
|
|||
def get_serializer_classes(self):
|
||||
serializer_classes = super().get_serializer_classes()
|
||||
serializer_classes['default'] = HostSerializer
|
||||
serializer_classes['info'] = HostInfoSerializer
|
||||
return serializer_classes
|
||||
|
||||
@action(methods=["GET"], detail=True, url_path="info")
|
||||
def info(self, *args, **kwargs):
|
||||
asset = super().get_object()
|
||||
return Response(asset.info)
|
||||
|
||||
|
|
|
@ -119,7 +119,7 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
|
|||
query_all = self.request.query_params.get("all", "0") == "all"
|
||||
include_assets = self.request.query_params.get('assets', '0') == '1'
|
||||
if not self.instance or not include_assets:
|
||||
return []
|
||||
return Asset.objects.none()
|
||||
if query_all:
|
||||
assets = self.instance.get_all_assets_for_tree()
|
||||
else:
|
||||
|
|
|
@ -66,6 +66,33 @@ class BasePlaybookManager:
|
|||
os.makedirs(path, exist_ok=True, mode=0o755)
|
||||
return path
|
||||
|
||||
@staticmethod
|
||||
def write_cert_to_file(filename, content):
|
||||
with open(filename, 'w') as f:
|
||||
f.write(content)
|
||||
return filename
|
||||
|
||||
def convert_cert_to_file(self, host, path_dir):
|
||||
if not path_dir:
|
||||
return host
|
||||
|
||||
specific = host.get('jms_asset', {}).get('secret_info', {})
|
||||
cert_fields = ('ca_cert', 'client_key', 'client_cert')
|
||||
filtered = list(filter(lambda x: specific.get(x), cert_fields))
|
||||
if not filtered:
|
||||
return host
|
||||
|
||||
cert_dir = os.path.join(path_dir, 'certs')
|
||||
if not os.path.exists(cert_dir):
|
||||
os.makedirs(cert_dir, 0o700, True)
|
||||
|
||||
for f in filtered:
|
||||
result = self.write_cert_to_file(
|
||||
os.path.join(cert_dir, f), specific.get(f)
|
||||
)
|
||||
host['jms_asset']['secret_info'][f] = result
|
||||
return host
|
||||
|
||||
def host_callback(self, host, automation=None, **kwargs):
|
||||
enabled_attr = '{}_enabled'.format(self.__class__.method_type())
|
||||
method_attr = '{}_method'.format(self.__class__.method_type())
|
||||
|
@ -78,6 +105,8 @@ class BasePlaybookManager:
|
|||
if not method_enabled:
|
||||
host['error'] = _('{} disabled'.format(self.__class__.method_type()))
|
||||
return host
|
||||
|
||||
host = self.convert_cert_to_file(host, kwargs.get('path_dir'))
|
||||
return host
|
||||
|
||||
@staticmethod
|
||||
|
|
|
@ -10,7 +10,12 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.specific.db_name }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
register: db_info
|
||||
|
||||
- name: Define info by set_fact
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.specific.db_name }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
mode: "{{ jms_account.mode }}"
|
||||
register: db_info
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_db: "{{ jms_asset.specific.db_name }}"
|
||||
login_db: "{{ jms_asset.spec_info.db_name }}"
|
||||
register: db_info
|
||||
|
||||
- name: Define info by set_fact
|
||||
|
|
|
@ -4,16 +4,21 @@
|
|||
- name: Get info
|
||||
ansible.builtin.set_fact:
|
||||
info:
|
||||
arch: "{{ ansible_architecture }}"
|
||||
distribution: "{{ ansible_distribution }}"
|
||||
distribution_version: "{{ ansible_distribution_version }}"
|
||||
kernel: "{{ ansible_kernel }}"
|
||||
vendor: "{{ ansible_system_vendor }}"
|
||||
model: "{{ ansible_product_name }}"
|
||||
sn: "{{ ansible_product_serial }}"
|
||||
cpu_model: "{{ ansible_processor }}"
|
||||
cpu_count: "{{ ansible_processor_count }}"
|
||||
cpu_cores: "{{ ansible_processor_cores }}"
|
||||
cpu_vcpus: "{{ ansible_processor_vcpus }}"
|
||||
memory: "{{ ansible_memtotal_mb }}"
|
||||
disk_total: "{{ (ansible_mounts | map(attribute='size_total') | sum / 1024 / 1024 / 1024) | round(2) }}"
|
||||
distribution: "{{ ansible_distribution }}"
|
||||
distribution_version: "{{ ansible_distribution_version }}"
|
||||
arch: "{{ ansible_architecture }}"
|
||||
kernel: "{{ ansible_kernel }}"
|
||||
|
||||
|
||||
|
||||
- debug:
|
||||
var: info
|
||||
|
|
|
@ -10,4 +10,9 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.specific.db_name }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert }}"
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
|
|
|
@ -10,5 +10,5 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.specific.db_name }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
mode: "{{ jms_account.mode }}"
|
||||
|
|
|
@ -10,4 +10,4 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_db: "{{ jms_asset.specific.db_name }}"
|
||||
login_db: "{{ jms_asset.spec_info.db_name }}"
|
||||
|
|
|
@ -10,6 +10,6 @@
|
|||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: '{{ jms_asset.specific.db_name }}'
|
||||
name: '{{ jms_asset.spec_info.db_name }}'
|
||||
script: |
|
||||
SELECT @@version
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from common.utils import get_logger
|
||||
from assets.const import AutomationTypes, Connectivity
|
||||
from common.utils import get_logger
|
||||
from ..base.manager import BasePlaybookManager
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
@ -28,7 +28,7 @@ class PingManager(BasePlaybookManager):
|
|||
|
||||
def on_host_error(self, host, error, result):
|
||||
asset, account = self.host_asset_and_account_mapper.get(host)
|
||||
asset.set_connectivity(Connectivity.FAILED)
|
||||
asset.set_connectivity(Connectivity.ERR)
|
||||
if not account:
|
||||
return
|
||||
account.set_connectivity(Connectivity.FAILED)
|
||||
account.set_connectivity(Connectivity.ERR)
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
import socket
|
||||
import paramiko
|
||||
|
||||
import paramiko
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from assets.models import Gateway
|
||||
from assets.const import AutomationTypes, Connectivity
|
||||
from assets.models import Gateway
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
@ -33,7 +33,7 @@ class PingGatewayManager:
|
|||
err = _('No account')
|
||||
return False, err
|
||||
|
||||
logger.debug('Test account: {}'.format(account))
|
||||
print('Test account: {}'.format(account))
|
||||
try:
|
||||
proxy.connect(
|
||||
gateway.address,
|
||||
|
@ -91,7 +91,7 @@ class PingGatewayManager:
|
|||
|
||||
@staticmethod
|
||||
def on_host_success(gateway, account):
|
||||
logger.info('\033[32m {} -> {}\033[0m\n'.format(gateway, account))
|
||||
print('\033[32m {} -> {}\033[0m\n'.format(gateway, account))
|
||||
gateway.set_connectivity(Connectivity.OK)
|
||||
if not account:
|
||||
return
|
||||
|
@ -99,15 +99,15 @@ class PingGatewayManager:
|
|||
|
||||
@staticmethod
|
||||
def on_host_error(gateway, account, error):
|
||||
logger.info('\033[31m {} -> {} 原因: {} \033[0m\n'.format(gateway, account, error))
|
||||
gateway.set_connectivity(Connectivity.FAILED)
|
||||
print('\033[31m {} -> {} 原因: {} \033[0m\n'.format(gateway, account, error))
|
||||
gateway.set_connectivity(Connectivity.ERR)
|
||||
if not account:
|
||||
return
|
||||
account.set_connectivity(Connectivity.FAILED)
|
||||
account.set_connectivity(Connectivity.ERR)
|
||||
|
||||
@staticmethod
|
||||
def before_runner_start():
|
||||
logger.info(">>> 开始执行测试网关可连接性任务")
|
||||
print(">>> 开始执行测试网关可连接性任务")
|
||||
|
||||
def get_accounts(self, gateway):
|
||||
account = gateway.select_account
|
||||
|
|
|
@@ -3,9 +3,9 @@ from django.utils.translation import ugettext_lazy as _


 class Connectivity(TextChoices):
-    UNKNOWN = 'unknown', _('Unknown')
+    UNKNOWN = '-', _('Unknown')
     OK = 'ok', _('Ok')
-    FAILED = 'failed', _('Failed')
+    ERR = 'err', _('Error')


 class AutomationTypes(TextChoices):
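Elsewhere in this commit a data migration rewrites stored values to match these new choices ('unknown' -> '-', 'failed' -> 'err'). A minimal usage sketch consistent with the `set_connectivity` calls visible throughout this diff; the helper function itself is hypothetical:

    from assets.const import Connectivity

    def record_ping_result(target, ok: bool):
        # target is anything exposing set_connectivity(), e.g. an Asset or Account
        target.set_connectivity(Connectivity.OK if ok else Connectivity.ERR)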
|
||||
|
|
|
@ -31,11 +31,11 @@ class DeviceTypes(BaseType):
|
|||
def _get_automation_constrains(cls) -> dict:
|
||||
return {
|
||||
'*': {
|
||||
'ansible_enabled': True,
|
||||
'ansible_enabled': False,
|
||||
'ansible_config': {
|
||||
'ansible_connection': 'local',
|
||||
},
|
||||
'ping_enabled': True,
|
||||
'ping_enabled': False,
|
||||
'gather_facts_enabled': False,
|
||||
'gather_accounts_enabled': False,
|
||||
'verify_account_enabled': False,
|
||||
|
|
|
@ -71,7 +71,7 @@ class HostTypes(BaseType):
|
|||
{'name': 'Linux'},
|
||||
{
|
||||
'name': GATEWAY_NAME,
|
||||
'domain_enabled': False,
|
||||
'domain_enabled': True,
|
||||
}
|
||||
],
|
||||
cls.UNIX: [
|
||||
|
|
|
@ -197,7 +197,7 @@ class AllTypes(ChoicesMixin):
|
|||
category_type_mapper[p.category] += platform_count[p.id]
|
||||
tp_platforms[p.category + '_' + p.type].append(p)
|
||||
|
||||
root = dict(id='ROOT', name=_('All types'), title='所有类型', open=True, isParent=True)
|
||||
root = dict(id='ROOT', name=_('All types'), title=_('All types'), open=True, isParent=True)
|
||||
nodes = [root]
|
||||
for category, type_cls in cls.category_types():
|
||||
# Category 格式化
|
||||
|
|
|
@ -20,6 +20,8 @@ class WebTypes(BaseType):
|
|||
def _get_automation_constrains(cls) -> dict:
|
||||
constrains = {
|
||||
'*': {
|
||||
'ansible_enabled': False,
|
||||
'ping_enabled': False,
|
||||
'gather_facts_enabled': False,
|
||||
'verify_account_enabled': False,
|
||||
'change_secret_enabled': False,
|
||||
|
|
|
@ -13,7 +13,7 @@ class Migration(migrations.Migration):
|
|||
migrations.AddField(
|
||||
model_name='asset',
|
||||
name='connectivity',
|
||||
field=models.CharField(choices=[('unknown', 'Unknown'), ('ok', 'Ok'), ('failed', 'Failed')], default='unknown', max_length=16, verbose_name='Connectivity'),
|
||||
field=models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='asset',
|
||||
|
@ -23,7 +23,7 @@ class Migration(migrations.Migration):
|
|||
migrations.AddField(
|
||||
model_name='authbook',
|
||||
name='connectivity',
|
||||
field=models.CharField(choices=[('unknown', 'Unknown'), ('ok', 'Ok'), ('failed', 'Failed')], default='unknown', max_length=16, verbose_name='Connectivity'),
|
||||
field=models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='authbook',
|
||||
|
@ -33,7 +33,7 @@ class Migration(migrations.Migration):
|
|||
migrations.AddField(
|
||||
model_name='historicalauthbook',
|
||||
name='connectivity',
|
||||
field=models.CharField(choices=[('unknown', 'Unknown'), ('ok', 'Ok'), ('failed', 'Failed')], default='unknown', max_length=16, verbose_name='Connectivity'),
|
||||
field=models.CharField(choices=[('-', 'Unknown'), ('ok', 'Ok'), ('err', 'Error')], default='-', max_length=16, verbose_name='Connectivity'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='historicalauthbook',
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import django.db
|
||||
from django.db import migrations, models
|
||||
import common.db.fields
|
||||
|
||||
|
||||
def migrate_to_host(apps, schema_editor):
|
||||
|
@ -71,12 +72,18 @@ class Migration(migrations.Migration):
|
|||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='asset',
|
||||
options={'ordering': ['name'],
|
||||
'permissions': [('refresh_assethardwareinfo', 'Can refresh asset hardware info'),
|
||||
('test_assetconnectivity', 'Can test asset connectivity'),
|
||||
('push_assetsystemuser', 'Can push system user to asset'),
|
||||
('match_asset', 'Can match asset'), ('add_assettonode', 'Add asset to node'),
|
||||
('move_assettonode', 'Move asset to node')], 'verbose_name': 'Asset'},
|
||||
options={
|
||||
'ordering': ['name'],
|
||||
'permissions': [
|
||||
('refresh_assethardwareinfo', 'Can refresh asset hardware info'),
|
||||
('test_assetconnectivity', 'Can test asset connectivity'),
|
||||
('push_assetaccount', 'Can push account to asset'),
|
||||
('test_account', 'Can verify account'), ('match_asset', 'Can match asset'),
|
||||
('add_assettonode', 'Add asset to node'),
|
||||
('move_assettonode', 'Move asset to node')
|
||||
],
|
||||
'verbose_name': 'Asset'
|
||||
},
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='asset',
|
||||
|
@ -114,9 +121,9 @@ class Migration(migrations.Migration):
|
|||
primary_key=True, serialize=False, to='assets.asset')),
|
||||
('db_name', models.CharField(blank=True, max_length=1024, verbose_name='Database')),
|
||||
('allow_invalid_cert', models.BooleanField(default=False, verbose_name='Allow invalid cert')),
|
||||
('ca_cert', models.TextField(blank=True, verbose_name='CA cert')),
|
||||
('client_cert', models.TextField(blank=True, verbose_name='Client cert')),
|
||||
('client_key', models.TextField(blank=True, verbose_name='Client key'),),
|
||||
('ca_cert', common.db.fields.EncryptTextField(blank=True, verbose_name='CA cert')),
|
||||
('client_cert', common.db.fields.EncryptTextField(blank=True, verbose_name='Client cert')),
|
||||
('client_key', common.db.fields.EncryptTextField(blank=True, verbose_name='Client key'),),
|
||||
('use_ssl', models.BooleanField(default=False, verbose_name='Use SSL'),),
|
||||
],
|
||||
options={
|
||||
|
|
|
@ -34,6 +34,13 @@ def migrate_macos_platform(apps, schema_editor):
|
|||
platform_model.objects.using(db_alias).filter(id=old_macos.id).delete()
|
||||
|
||||
|
||||
def migrate_connectivity(apps, schema_editor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
asset_model = apps.get_model('assets', 'Asset')
|
||||
asset_model.objects.using(db_alias).filter(connectivity='unknown').update(connectivity='-')
|
||||
asset_model.objects.using(db_alias).filter(connectivity='failed').update(connectivity='err')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('assets', '0096_auto_20220426_1550'),
|
||||
|
@ -43,4 +50,5 @@ class Migration(migrations.Migration):
|
|||
migrations.RunPython(create_internal_platforms),
|
||||
migrations.RunPython(update_user_platforms),
|
||||
migrations.RunPython(migrate_macos_platform),
|
||||
migrations.RunPython(migrate_connectivity),
|
||||
]
|
||||
|
|
|
@ -2,16 +2,15 @@
|
|||
|
||||
import time
|
||||
from django.db import migrations
|
||||
from assets.models import Platform
|
||||
|
||||
|
||||
def migrate_accounts(apps, schema_editor):
|
||||
def migrate_asset_accounts(apps, schema_editor):
|
||||
auth_book_model = apps.get_model('assets', 'AuthBook')
|
||||
account_model = apps.get_model('accounts', 'Account')
|
||||
|
||||
count = 0
|
||||
bulk_size = 1000
|
||||
print("\n\tStart migrate accounts")
|
||||
print("\n\tStart migrate asset accounts")
|
||||
while True:
|
||||
start = time.time()
|
||||
auth_books = auth_book_model.objects \
|
||||
|
@ -71,11 +70,76 @@ def migrate_accounts(apps, schema_editor):
|
|||
accounts.append(account)
|
||||
|
||||
account_model.objects.bulk_create(accounts, ignore_conflicts=True)
|
||||
print("\t - Create accounts: {}-{} using: {:.2f}s".format(
|
||||
print("\t - Create asset accounts: {}-{} using: {:.2f}s".format(
|
||||
count - len(auth_books), count, time.time() - start
|
||||
))
|
||||
|
||||
|
||||
def migrate_db_accounts(apps, schema_editor):
|
||||
app_perm_model = apps.get_model('perms', 'ApplicationPermission')
|
||||
account_model = apps.get_model('accounts', 'Account')
|
||||
perms = app_perm_model.objects.filter(category__in=['db', 'cloud'])
|
||||
|
||||
same_attrs = [
|
||||
'id', 'username', 'comment', 'date_created', 'date_updated',
|
||||
'created_by', 'org_id',
|
||||
]
|
||||
auth_attrs = ['password', 'private_key', 'token']
|
||||
all_attrs = same_attrs + auth_attrs
|
||||
|
||||
print("\n\tStart migrate app accounts")
|
||||
|
||||
index = 0
|
||||
total = perms.count()
|
||||
|
||||
for perm in perms:
|
||||
index += 1
|
||||
start = time.time()
|
||||
|
||||
system_users = perm.system_users.all()
|
||||
accounts = []
|
||||
for s in system_users:
|
||||
values = {'version': 1}
|
||||
values.update({attr: getattr(s, attr, '') for attr in all_attrs})
|
||||
values['created_by'] = str(s.id)
|
||||
|
||||
auth_infos = []
|
||||
username = values['username']
|
||||
for attr in auth_attrs:
|
||||
secret = values.pop(attr, None)
|
||||
if not secret:
|
||||
continue
|
||||
|
||||
if attr == 'private_key':
|
||||
secret_type = 'ssh_key'
|
||||
name = f'{username}(ssh key)'
|
||||
elif attr == 'token':
|
||||
secret_type = 'token'
|
||||
name = f'{username}(token)'
|
||||
else:
|
||||
secret_type = attr
|
||||
name = username
|
||||
auth_infos.append((name, secret_type, secret))
|
||||
|
||||
if not auth_infos:
|
||||
auth_infos.append((username, 'password', ''))
|
||||
|
||||
for name, secret_type, secret in auth_infos:
|
||||
account = account_model(**values, name=name, secret=secret, secret_type=secret_type)
|
||||
accounts.append(account)
|
||||
|
||||
apps = perm.applications.all()
|
||||
for app in apps:
|
||||
for account in accounts:
|
||||
setattr(account, 'asset_id', str(app.id))
|
||||
|
||||
account_model.objects.bulk_create(accounts, ignore_conflicts=True)
|
||||
|
||||
print("\t - Progress ({}/{}), Create app accounts: {} using: {:.2f}s".format(
|
||||
index, total, len(accounts), time.time() - start
|
||||
))
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('accounts', '0001_initial'),
|
||||
|
@ -83,5 +147,6 @@ class Migration(migrations.Migration):
|
|||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_accounts),
|
||||
migrations.RunPython(migrate_asset_accounts),
|
||||
migrations.RunPython(migrate_db_accounts),
|
||||
]
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
# Generated by Django 3.2.14 on 2022-08-11 07:11
|
||||
import assets.models.platform
|
||||
import django.db.models
|
||||
from django.db import migrations, models
|
||||
|
||||
|
|
|
@ -18,6 +18,8 @@ def _create_account_obj(secret, secret_type, gateway, asset, account_model):
|
|||
|
||||
def migrate_gateway_to_asset(apps, schema_editor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
node_model = apps.get_model('assets', 'Node')
|
||||
org_model = apps.get_model('orgs', 'Organization')
|
||||
gateway_model = apps.get_model('assets', 'Gateway')
|
||||
platform_model = apps.get_model('assets', 'Platform')
|
||||
gateway_platform = platform_model.objects.using(db_alias).get(name=GATEWAY_NAME)
|
||||
|
@ -28,6 +30,16 @@ def migrate_gateway_to_asset(apps, schema_editor):
|
|||
asset_model = apps.get_model('assets', 'Asset')
|
||||
protocol_model = apps.get_model('assets', 'Protocol')
|
||||
gateways = gateway_model.objects.all()
|
||||
|
||||
org_ids = gateways.order_by('org_id').values_list('org_id', flat=True).distinct()
|
||||
node_dict = {}
|
||||
for org_id in org_ids:
|
||||
org = org_model.objects.using(db_alias).filter(id=org_id).first()
|
||||
node = node_model.objects.using(db_alias).filter(
|
||||
org_id=org_id, value=org.name, full_value=f'/{org.name}'
|
||||
).first()
|
||||
node_dict[org_id] = node
|
||||
|
||||
for gateway in gateways:
|
||||
comment = gateway.comment if gateway.comment else ''
|
||||
data = {
|
||||
|
@ -40,6 +52,8 @@ def migrate_gateway_to_asset(apps, schema_editor):
|
|||
'platform': gateway_platform,
|
||||
}
|
||||
asset = asset_model.objects.using(db_alias).create(**data)
|
||||
node = node_dict.get(str(gateway.org_id))
|
||||
asset.nodes.set([node])
|
||||
asset_dict[gateway.id] = asset
|
||||
protocol_model.objects.using(db_alias).create(name='ssh', port=gateway.port, asset=asset)
|
||||
hosts = [host_model(asset_ptr=asset) for asset in asset_dict.values()]
|
||||
|
|
|
@ -52,9 +52,6 @@ class Migration(migrations.Migration):
|
|||
migrations.DeleteModel(
|
||||
name='Cluster',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='AdminUser',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='HistoricalAuthBook',
|
||||
),
|
||||
|
|
|
@ -52,11 +52,7 @@ class Migration(migrations.Migration):
|
|||
migrations.AlterModelOptions(
|
||||
name='asset',
|
||||
options={'ordering': ['name'],
|
||||
'permissions': [('refresh_assethardwareinfo', 'Can refresh asset hardware info'),
|
||||
('test_assetconnectivity', 'Can test asset connectivity'),
|
||||
('push_assetaccount', 'Can push account to asset'),
|
||||
('match_asset', 'Can match asset'), ('add_assettonode', 'Add asset to node'),
|
||||
('move_assettonode', 'Move asset to node')], 'verbose_name': 'Asset'},
|
||||
'permissions': [('refresh_assethardwareinfo', 'Can refresh asset hardware info'), ('test_assetconnectivity', 'Can test asset connectivity'), ('push_assetaccount', 'Can push account to asset'), ('test_account', 'Can verify account'), ('match_asset', 'Can match asset'), ('add_assettonode', 'Add asset to node'), ('move_assettonode', 'Move asset to node')], 'verbose_name': 'Asset'},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='accountbackupplan',
|
||||
|
|
|
@ -35,7 +35,7 @@ class Migration(migrations.Migration):
|
|||
],
|
||||
options={
|
||||
'verbose_name': 'Automation task',
|
||||
'unique_together': {('org_id', 'name')},
|
||||
'unique_together': {('org_id', 'name', 'type')},
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
|
@ -93,18 +93,4 @@ class Migration(migrations.Migration):
|
|||
name='automation',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='assets.baseautomation', verbose_name='Automation task'),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='baseautomation',
|
||||
unique_together={('org_id', 'name', 'type')},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='asset',
|
||||
options={'ordering': ['name'],
|
||||
'permissions': [('refresh_assethardwareinfo', 'Can refresh asset hardware info'),
|
||||
('test_assetconnectivity', 'Can test asset connectivity'),
|
||||
('push_assetaccount', 'Can push account to asset'),
|
||||
('test_account', 'Can verify account'), ('match_asset', 'Can match asset'),
|
||||
('add_assettonode', 'Add asset to node'),
|
||||
('move_assettonode', 'Move asset to node')], 'verbose_name': 'Asset'},
|
||||
),
|
||||
]
|
||||
]
|
||||
|
|
|
@ -17,7 +17,23 @@ __all__ = ['SystemUser']
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SystemUser(OrgModelMixin):
|
||||
class OldBaseUser(models.Model):
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
username = models.CharField(max_length=128, blank=True, verbose_name=_('Username'), db_index=True)
|
||||
password = fields.EncryptCharField(max_length=256, blank=True, null=True, verbose_name=_('Password'))
|
||||
private_key = fields.EncryptTextField(blank=True, null=True, verbose_name=_('SSH private key'))
|
||||
public_key = fields.EncryptTextField(blank=True, null=True, verbose_name=_('SSH public key'))
|
||||
comment = models.TextField(blank=True, verbose_name=_('Comment'))
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_("Date created"))
|
||||
date_updated = models.DateTimeField(auto_now=True, verbose_name=_("Date updated"))
|
||||
created_by = models.CharField(max_length=128, null=True, verbose_name=_('Created by'))
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
class SystemUser(OrgModelMixin, OldBaseUser):
|
||||
LOGIN_AUTO = 'auto'
|
||||
LOGIN_MANUAL = 'manual'
|
||||
LOGIN_MODE_CHOICES = (
|
||||
|
@ -29,19 +45,7 @@ class SystemUser(OrgModelMixin):
|
|||
common = 'common', _('Common user')
|
||||
admin = 'admin', _('Admin user')
|
||||
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
username = models.CharField(max_length=128, blank=True, verbose_name=_('Username'), db_index=True)
|
||||
password = fields.EncryptCharField(max_length=256, blank=True, null=True, verbose_name=_('Password'))
|
||||
private_key = fields.EncryptTextField(blank=True, null=True, verbose_name=_('SSH private key'))
|
||||
public_key = fields.EncryptTextField(blank=True, null=True, verbose_name=_('SSH public key'))
|
||||
token = models.TextField(default='', verbose_name=_('Token'))
|
||||
|
||||
comment = models.TextField(blank=True, verbose_name=_('Comment'))
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_("Date created"))
|
||||
date_updated = models.DateTimeField(auto_now=True, verbose_name=_("Date updated"))
|
||||
created_by = models.CharField(max_length=128, null=True, verbose_name=_('Created by'))
|
||||
|
||||
username_same_with_user = models.BooleanField(default=False, verbose_name=_("Username same with user"))
|
||||
type = models.CharField(max_length=16, choices=Type.choices, default=Type.common, verbose_name=_('Type'))
|
||||
priority = models.IntegerField(default=81, verbose_name=_("Priority"), help_text=_("1-100, the lower the value will be match first"), validators=[MinValueValidator(1), MaxValueValidator(100)])
|
||||
|
@ -66,3 +70,26 @@ class SystemUser(OrgModelMixin):
|
|||
permissions = [
|
||||
('match_systemuser', _('Can match system user')),
|
||||
]
|
||||
|
||||
|
||||
# Deprecated: 准备废弃
|
||||
class AdminUser(OrgModelMixin, OldBaseUser):
|
||||
"""
|
||||
A privileged user that ansible can use it to push system user and so on
|
||||
"""
|
||||
BECOME_METHOD_CHOICES = (
|
||||
('sudo', 'sudo'),
|
||||
('su', 'su'),
|
||||
)
|
||||
become = models.BooleanField(default=True)
|
||||
become_method = models.CharField(choices=BECOME_METHOD_CHOICES, default='sudo', max_length=4)
|
||||
become_user = models.CharField(default='root', max_length=64)
|
||||
_become_pass = models.CharField(default='', blank=True, max_length=128)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
ordering = ['name']
|
||||
unique_together = [('name', 'org_id')]
|
||||
verbose_name = _("Admin user")
|
|
@ -10,6 +10,7 @@ from django.db import models
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from assets import const
|
||||
from common.db.fields import EncryptMixin
|
||||
from common.utils import lazyproperty
|
||||
from orgs.mixins.models import OrgManager, JMSOrgBaseModel
|
||||
from ..base import AbsConnectivity
|
||||
|
@ -112,45 +113,47 @@ class Asset(NodesRelationMixin, AbsConnectivity, JMSOrgBaseModel):
|
|||
verbose_name=_("Nodes"))
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
|
||||
labels = models.ManyToManyField('assets.Label', blank=True, related_name='assets', verbose_name=_("Labels"))
|
||||
info = models.JSONField(verbose_name='Info', default=dict, blank=True)
|
||||
info = models.JSONField(verbose_name='Info', default=dict, blank=True) # 资产的一些信息,如 硬件信息
|
||||
|
||||
objects = AssetManager.from_queryset(AssetQuerySet)()
|
||||
|
||||
def __str__(self):
|
||||
return '{0.name}({0.address})'.format(self)
|
||||
|
||||
@property
|
||||
def specific(self):
|
||||
instance = getattr(self, self.category, None)
|
||||
if not instance:
|
||||
return {}
|
||||
specific_fields = self.get_specific_fields(instance)
|
||||
@staticmethod
|
||||
def get_spec_values(instance, fields):
|
||||
info = {}
|
||||
for i in specific_fields:
|
||||
for i in fields:
|
||||
v = getattr(instance, i.name)
|
||||
if isinstance(i, models.JSONField) and not isinstance(v, (list, dict)):
|
||||
v = json.loads(v)
|
||||
info[i.name] = v
|
||||
return info
|
||||
|
||||
@property
|
||||
@lazyproperty
|
||||
def spec_info(self):
|
||||
instance = getattr(self, self.category, None)
|
||||
if not instance:
|
||||
return []
|
||||
specific_fields = self.get_specific_fields(instance)
|
||||
info = [
|
||||
{
|
||||
'label': i.verbose_name,
|
||||
'name': i.name,
|
||||
'value': getattr(instance, i.name)
|
||||
}
|
||||
for i in specific_fields
|
||||
]
|
||||
return info
|
||||
return {}
|
||||
spec_fields = self.get_spec_fields(instance)
|
||||
return self.get_spec_values(instance, spec_fields)
|
||||
|
||||
@staticmethod
|
||||
def get_spec_fields(instance, secret=False):
|
||||
spec_fields = [i for i in instance._meta.local_fields if i.name != 'asset_ptr']
|
||||
spec_fields = [i for i in spec_fields if isinstance(i, EncryptMixin) == secret]
|
||||
return spec_fields
|
||||
|
||||
@lazyproperty
|
||||
def enabled_info(self):
|
||||
def secret_info(self):
|
||||
instance = getattr(self, self.category, None)
|
||||
if not instance:
|
||||
return {}
|
||||
spec_fields = self.get_spec_fields(instance, secret=True)
|
||||
return self.get_spec_values(instance, spec_fields)
|
||||
|
||||
@lazyproperty
|
||||
def auto_info(self):
|
||||
platform = self.platform
|
||||
automation = self.platform.automation
|
||||
return {
|
||||
|
@ -164,11 +167,6 @@ class Asset(NodesRelationMixin, AbsConnectivity, JMSOrgBaseModel):
|
|||
'gather_accounts_enabled': automation.gather_accounts_enabled,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get_specific_fields(instance):
|
||||
specific_fields = [i for i in instance._meta.local_fields if i.name != 'asset_ptr']
|
||||
return specific_fields
|
||||
|
||||
def get_target_ip(self):
|
||||
return self.address
|
||||
|
||||
|
|
|
@ -1,28 +1,21 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from common.db.fields import EncryptTextField
from .common import Asset


class Database(Asset):
db_name = models.CharField(max_length=1024, verbose_name=_("Database"), blank=True)
use_ssl = models.BooleanField(default=False, verbose_name=_("Use SSL"))
ca_cert = models.TextField(verbose_name=_("CA cert"), blank=True)
client_cert = models.TextField(verbose_name=_("Client cert"), blank=True)
client_key = models.TextField(verbose_name=_("Client key"), blank=True)
ca_cert = EncryptTextField(verbose_name=_("CA cert"), blank=True)
client_cert = EncryptTextField(verbose_name=_("Client cert"), blank=True)
client_key = EncryptTextField(verbose_name=_("Client key"), blank=True)
allow_invalid_cert = models.BooleanField(default=False, verbose_name=_('Allow invalid cert'))

def __str__(self):
return '{}({}://{}/{})'.format(self.name, self.type, self.address, self.db_name)

@property
def specific(self):
return {
'db_name': self.db_name,
'use_ssl': self.use_ssl,
'allow_invalid_cert': self.allow_invalid_cert,
}

@property
def ip(self):
return self.address

@ -68,7 +68,7 @@ class Platform(models.Model):
"""

class CharsetChoices(models.TextChoices):
utf8 = 'utf8', 'UTF-8'
utf8 = 'utf-8', 'UTF-8'
gbk = 'gbk', 'GBK'

name = models.SlugField(verbose_name=_("Name"), unique=True, allow_unicode=True)

@ -6,10 +6,11 @@ from django.db.transaction import atomic
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from accounts.models import Account, AccountTemplate
|
||||
from accounts.models import Account
|
||||
from accounts.serializers import AccountSerializerCreateValidateMixin
|
||||
from common.serializers import WritableNestedModelSerializer, SecretReadableMixin, CommonModelSerializer
|
||||
from common.serializers.fields import LabeledChoiceField
|
||||
from common.utils import lazyproperty
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from ...const import Category, AllTypes
|
||||
from ...models import Asset, Node, Platform, Label, Protocol
|
||||
|
@ -18,7 +19,7 @@ __all__ = [
|
|||
'AssetSerializer', 'AssetSimpleSerializer', 'MiniAssetSerializer',
|
||||
'AssetTaskSerializer', 'AssetsTaskSerializer', 'AssetProtocolsSerializer',
|
||||
'AssetDetailSerializer', 'DetailMixin', 'AssetAccountSerializer',
|
||||
'AccountSecretSerializer'
|
||||
'AccountSecretSerializer', 'SpecSerializer'
|
||||
]
|
||||
|
||||
|
||||
|
@ -54,6 +55,10 @@ class AssetAccountSerializer(
|
|||
push_now = serializers.BooleanField(
|
||||
default=False, label=_("Push now"), write_only=True
|
||||
)
|
||||
template = serializers.BooleanField(
|
||||
default=False, label=_("Template"), write_only=True
|
||||
)
|
||||
name = serializers.CharField(max_length=128, required=False, label=_("Name"))
|
||||
|
||||
class Meta:
|
||||
model = Account
|
||||
|
@ -62,7 +67,7 @@ class AssetAccountSerializer(
|
|||
'version', 'secret_type',
|
||||
]
|
||||
fields_write_only = [
|
||||
'secret', 'push_now'
|
||||
'secret', 'push_now', 'template'
|
||||
]
|
||||
fields = fields_mini + fields_write_only
|
||||
extra_kwargs = {
|
||||
|
@ -74,33 +79,6 @@ class AssetAccountSerializer(
|
|||
value = self.initial_data.get('username')
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def validate_template(value):
|
||||
try:
|
||||
return AccountTemplate.objects.get(id=value)
|
||||
except AccountTemplate.DoesNotExist:
|
||||
raise serializers.ValidationError(_('Account template not found'))
|
||||
|
||||
@staticmethod
|
||||
def replace_attrs(account_template: AccountTemplate, attrs: dict):
|
||||
exclude_fields = [
|
||||
'_state', 'org_id', 'id', 'date_created',
|
||||
'date_updated'
|
||||
]
|
||||
template_attrs = {
|
||||
k: v for k, v in account_template.__dict__.items()
|
||||
if k not in exclude_fields
|
||||
}
|
||||
for k, v in template_attrs.items():
|
||||
attrs.setdefault(k, v)
|
||||
|
||||
def create(self, validated_data):
|
||||
from accounts.tasks import push_accounts_to_assets
|
||||
instance = super().create(validated_data)
|
||||
if self.push_now:
|
||||
push_accounts_to_assets.delay([instance.id], [instance.asset_id])
|
||||
return instance
|
||||
|
||||
|
||||
class AccountSecretSerializer(SecretReadableMixin, CommonModelSerializer):
|
||||
class Meta:
|
||||
|
@ -113,13 +91,25 @@ class AccountSecretSerializer(SecretReadableMixin, CommonModelSerializer):
}


class SpecSerializer(serializers.Serializer):
# 数据库
db_name = serializers.CharField(label=_("Database"), max_length=128, required=False)
use_ssl = serializers.BooleanField(label=_("Use SSL"), required=False)
allow_invalid_cert = serializers.BooleanField(label=_("Allow invalid cert"), required=False)
# Web
autofill = serializers.CharField(label=_("Auto fill"), required=False)
username_selector = serializers.CharField(label=_("Username selector"), required=False)
password_selector = serializers.CharField(label=_("Password selector"), required=False)
submit_selector = serializers.CharField(label=_("Submit selector"), required=False)
script = serializers.JSONField(label=_("Script"), required=False)

class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSerializer):
|
||||
category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
|
||||
type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))
|
||||
labels = AssetLabelSerializer(many=True, required=False, label=_('Label'))
|
||||
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'))
|
||||
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
|
||||
accounts = AssetAccountSerializer(many=True, required=False, write_only=True, label=_('Account'))
|
||||
enabled_info = serializers.DictField(read_only=True, label=_('Enabled info'))
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
|
@ -127,12 +117,12 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
fields_small = fields_mini + ['is_active', 'comment']
|
||||
fields_fk = ['domain', 'platform']
|
||||
fields_m2m = [
|
||||
'nodes', 'labels', 'protocols', 'nodes_display', 'accounts'
|
||||
'nodes', 'labels', 'protocols',
|
||||
'nodes_display', 'accounts'
|
||||
]
|
||||
read_only_fields = [
|
||||
'category', 'type', 'info', 'enabled_info',
|
||||
'connectivity', 'date_verified',
|
||||
'created_by', 'date_created'
|
||||
'category', 'type', 'connectivity',
|
||||
'date_verified', 'created_by', 'date_created'
|
||||
]
|
||||
fields = fields_small + fields_fk + fields_m2m + read_only_fields
|
||||
extra_kwargs = {
|
||||
|
@ -145,15 +135,36 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
super().__init__(*args, **kwargs)
|
||||
self._init_field_choices()
|
||||
|
||||
def _get_protocols_required_default(self):
|
||||
platform = self._initial_data_platform
|
||||
platform_protocols = platform.protocols.all()
|
||||
protocols_default = [p for p in platform_protocols if p.default]
|
||||
protocols_required = [p for p in platform_protocols if p.required or p.primary]
|
||||
return protocols_required, protocols_default
|
||||
|
||||
def _set_protocols_default(self):
|
||||
if not hasattr(self, 'initial_data'):
|
||||
return
|
||||
protocols = self.initial_data.get('protocols')
|
||||
if protocols is not None:
|
||||
return
|
||||
|
||||
protocols_required, protocols_default = self._get_protocols_required_default()
|
||||
protocols_data = [
|
||||
{'name': p.name, 'port': p.port}
|
||||
for p in protocols_required + protocols_default
|
||||
]
|
||||
self.initial_data['protocols'] = protocols_data
|
||||
|
||||
def _init_field_choices(self):
|
||||
request = self.context.get('request')
|
||||
if not request:
|
||||
return
|
||||
category = request.path.strip('/').split('/')[-1].rstrip('s')
|
||||
field_category = self.fields.get('category')
|
||||
field_category._choices = Category.filter_choices(category)
|
||||
field_category.choices = Category.filter_choices(category)
|
||||
field_type = self.fields.get('type')
|
||||
field_type._choices = AllTypes.filter_choices(category)
|
||||
field_type.choices = AllTypes.filter_choices(category)
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
|
@ -180,6 +191,26 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
nodes_to_set.append(node)
|
||||
instance.nodes.set(nodes_to_set)
|
||||
|
||||
@lazyproperty
|
||||
def _initial_data_platform(self):
|
||||
if self.instance:
|
||||
return self.instance.platform
|
||||
|
||||
platform_id = self.initial_data.get('platform')
|
||||
if isinstance(platform_id, dict):
|
||||
platform_id = platform_id.get('id') or platform_id.get('pk')
|
||||
platform = Platform.objects.filter(id=platform_id).first()
|
||||
if not platform:
|
||||
raise serializers.ValidationError({'platform': _("Platform not exist")})
|
||||
return platform
|
||||
|
||||
def validate_domain(self, value):
|
||||
platform = self._initial_data_platform
|
||||
if platform.domain_enabled:
|
||||
return value
|
||||
else:
|
||||
return None
|
||||
|
||||
def validate_nodes(self, nodes):
|
||||
if nodes:
|
||||
return nodes
|
||||
|
@ -190,27 +221,20 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
if not node_id:
|
||||
return []
|
||||
|
||||
def is_valid(self, raise_exception=False):
|
||||
self._set_protocols_default()
|
||||
return super().is_valid(raise_exception)
|
||||
|
||||
def validate_protocols(self, protocols_data):
|
||||
if not protocols_data:
|
||||
protocols_data = []
|
||||
platform_id = self.initial_data.get('platform')
|
||||
if isinstance(platform_id, dict):
|
||||
platform_id = platform_id.get('id') or platform_id.get('pk')
|
||||
platform = Platform.objects.filter(id=platform_id).first()
|
||||
if not platform:
|
||||
raise serializers.ValidationError({'platform': _("Platform not exist")})
|
||||
|
||||
# 目的是去重
|
||||
protocols_data_map = {p['name']: p for p in protocols_data}
|
||||
platform_protocols = platform.protocols.all()
|
||||
protocols_default = [p for p in platform_protocols if p.default]
|
||||
protocols_required = [p for p in platform_protocols if p.required or p.primary]
|
||||
|
||||
if not protocols_data_map:
|
||||
protocols_data_map = {
|
||||
p.name: {'name': p.name, 'port': p.port}
|
||||
for p in protocols_required + protocols_default
|
||||
}
|
||||
for p in protocols_data:
|
||||
port = p.get('port', 0)
|
||||
if port < 1 or port > 65535:
|
||||
error = p.get('name') + ': ' + _("port out of range (1-65535)")
|
||||
raise serializers.ValidationError(error)
|
||||
|
||||
protocols_required, protocols_default = self._get_protocols_required_default()
|
||||
protocols_not_found = [p.name for p in protocols_required if p.name not in protocols_data_map]
|
||||
if protocols_not_found:
|
||||
raise serializers.ValidationError({
|
||||
|
@ -218,10 +242,18 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
|
|||
})
|
||||
return protocols_data_map.values()
|
||||
|
||||
@staticmethod
|
||||
def accounts_create(accounts_data, asset):
|
||||
for data in accounts_data:
|
||||
data['asset'] = asset
|
||||
AssetAccountSerializer().create(data)
|
||||
|
||||
@atomic
|
||||
def create(self, validated_data):
|
||||
nodes_display = validated_data.pop('nodes_display', '')
|
||||
accounts = validated_data.pop('accounts', [])
|
||||
instance = super().create(validated_data)
|
||||
self.accounts_create(accounts, instance)
|
||||
self.perform_nodes_display_create(instance, nodes_display)
|
||||
return instance
|
||||
|
||||
|
@ -235,11 +267,13 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali

class DetailMixin(serializers.Serializer):
accounts = AssetAccountSerializer(many=True, required=False, label=_('Accounts'))
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
auto_info = serializers.DictField(read_only=True, label=_('Auto info'))

def get_field_names(self, declared_fields, info):
names = super().get_field_names(declared_fields, info)
names.extend([
'accounts', 'info', 'specific', 'spec_info'
'accounts', 'info', 'spec_info', 'auto_info'
])
return names
|
@ -1,3 +1,6 @@
|
|||
from rest_framework.serializers import ValidationError
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from assets.models import Database
|
||||
from .common import AssetSerializer
|
||||
from ..gateway import GatewayWithAccountSecretSerializer
|
||||
|
@ -14,6 +17,13 @@ class DatabaseSerializer(AssetSerializer):
|
|||
]
|
||||
fields = AssetSerializer.Meta.fields + extra_fields
|
||||
|
||||
def validate(self, attrs):
|
||||
platform = attrs.get('platform')
|
||||
if platform and getattr(platform, 'type') == 'mongodb' \
|
||||
and not attrs.get('db_name'):
|
||||
raise ValidationError({'db_name': _('This field is required.')})
|
||||
return attrs
|
||||
|
||||
|
||||
class DatabaseWithGatewaySerializer(DatabaseSerializer):
|
||||
gateway = GatewayWithAccountSecretSerializer()
|
||||
|
|
|
@ -19,13 +19,10 @@ class HostInfoSerializer(serializers.Serializer):
|
|||
cpu_vcpus = serializers.IntegerField(required=False, label=_('CPU vcpus'))
|
||||
memory = serializers.CharField(max_length=64, allow_blank=True, required=False, label=_('Memory'))
|
||||
disk_total = serializers.CharField(max_length=1024, allow_blank=True, required=False, label=_('Disk total'))
|
||||
disk_info = serializers.CharField(max_length=1024, allow_blank=True, required=False, label=_('Disk info'))
|
||||
|
||||
os = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('OS'))
|
||||
os_version = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS version'))
|
||||
os_arch = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS arch'))
|
||||
hostname_raw = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Hostname raw'))
|
||||
number = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Asset number'))
|
||||
distribution = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('OS'))
|
||||
distribution_version = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS version'))
|
||||
arch = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS arch'))
|
||||
|
||||
|
||||
class HostSerializer(AssetSerializer):
|
||||
|
|
|
@ -34,6 +34,13 @@ class DomainSerializer(BulkOrgResourceModelSerializer):
|
|||
data['assets'] = [i for i in assets if str(i['id']) not in gateway_ids]
|
||||
return data
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
assets = validated_data.pop('assets', [])
|
||||
assets = assets + list(instance.gateways)
|
||||
validated_data['assets'] = assets
|
||||
instance = super().update(instance, validated_data)
|
||||
return instance
|
||||
|
||||
|
||||
class DomainWithGatewaySerializer(serializers.ModelSerializer):
|
||||
gateways = GatewayWithAccountSecretSerializer(many=True, read_only=True)
|
||||
|
|
|
@ -51,18 +51,19 @@ class PlatformAutomationSerializer(serializers.ModelSerializer):
"gather_accounts_enabled", "gather_accounts_method",
]
extra_kwargs = {
"ping_enabled": {"label": "启用资产探测"},
"ping_method": {"label": "资产探测方式"},
"gather_facts_enabled": {"label": "收集资产信息"},
"gather_facts_method": {"label": "收集信息方式"},
"verify_account_enabled": {"label": "启用校验账号"},
"verify_account_method": {"label": "校验账号方式"},
"change_secret_enabled": {"label": "启用账号改密"},
"change_secret_method": {"label": "账号改密方式"},
"push_account_enabled": {"label": "启用推送账号"},
"push_account_method": {"label": "推送账号方式"},
"gather_accounts_enabled": {"label": "启用账号收集"},
"gather_accounts_method": {"label": "收集账号方式"},
# 启用资产探测
"ping_enabled": {"label": _("Ping enabled")},
"ping_method": {"label": _("Ping method")},
"gather_facts_enabled": {"label": _("Gather facts enabled")},
"gather_facts_method": {"label": _("Gather facts method")},
"verify_account_enabled": {"label": _("Verify account enabled")},
"verify_account_method": {"label": _("Verify account method")},
"change_secret_enabled": {"label": _("Change secret enabled")},
"change_secret_method": {"label": _("Change secret method")},
"push_account_enabled": {"label": _("Push account enabled")},
"push_account_method": {"label": _("Push account method")},
"gather_accounts_enabled": {"label": _("Gather accounts enabled")},
"gather_accounts_method": {"label": _("Gather accounts method")},
}

@ -91,7 +92,7 @@ class PlatformSerializer(WritableNestedModelSerializer):
automation = PlatformAutomationSerializer(label=_("Automation"), required=False)
su_method = LabeledChoiceField(
choices=[("sudo", "sudo su -"), ("su", "su - ")],
label="切换方式", required=False, default="sudo", allow_null=True
label=_("Su method"), required=False, default="sudo", allow_null=True
)

class Meta:

@ -107,9 +108,9 @@ class PlatformSerializer(WritableNestedModelSerializer):
"comment",
]
extra_kwargs = {
"su_enabled": {"label": "启用切换账号"},
"domain_enabled": {"label": "启用网域"},
"domain_default": {"label": "默认网域"},
"su_enabled": {"label": _('Su enabled')},
"domain_enabled": {"label": _('Domain enabled')},
"domain_default": {"label": _('Default Domain')},
}

@classmethod

@ -131,7 +131,7 @@ class OperatorLogHandler(metaclass=Singleton):
|
|||
return before, after
|
||||
|
||||
def create_or_update_operate_log(
|
||||
self, action, resource_type, resource=None,
|
||||
self, action, resource_type, resource=None, resource_display=None,
|
||||
force=False, log_id=None, before=None, after=None,
|
||||
object_name=None
|
||||
):
|
||||
|
@ -140,7 +140,9 @@ class OperatorLogHandler(metaclass=Singleton):
|
|||
return
|
||||
|
||||
remote_addr = get_request_ip(current_request)
|
||||
resource_display = self.get_resource_display(resource)
|
||||
if resource_display is None:
|
||||
resource_display = self.get_resource_display(resource)
|
||||
resource_id = getattr(resource, 'pk', '')
|
||||
before, after = self.data_processing(before, after)
|
||||
if not force and not any([before, after]):
|
||||
# 前后都没变化,没必要生成日志,除非手动强制保存
|
||||
|
@ -148,9 +150,10 @@ class OperatorLogHandler(metaclass=Singleton):
|
|||
|
||||
data = {
|
||||
'id': log_id, "user": str(user), 'action': action,
|
||||
'resource_type': str(resource_type), 'resource': resource_display,
|
||||
'resource_type': str(resource_type),
|
||||
'resource_id': resource_id, 'resource': resource_display,
|
||||
'remote_addr': remote_addr, 'before': before, 'after': after,
|
||||
'org_id': get_current_org_id(), 'resource_id': str(resource.id)
|
||||
'org_id': get_current_org_id(),
|
||||
}
|
||||
with transaction.atomic():
|
||||
if self.log_client.ping(timeout=1):
|
||||
|
|
|
@ -47,4 +47,9 @@ class Migration(migrations.Migration):
|
|||
migrations.RunPython(migrate_operate_log_after_before),
|
||||
migrations.RemoveField(model_name='operatelog', name='after', ),
|
||||
migrations.RemoveField(model_name='operatelog', name='before', ),
|
||||
migrations.AlterField(
|
||||
model_name='operatelog',
|
||||
name='resource_id',
|
||||
field=models.CharField(blank=True, db_index=True, default='', max_length=128, verbose_name='Resource'),
|
||||
),
|
||||
]
|
||||
|
|
|
@ -55,7 +55,7 @@ class OperateLog(OrgModelMixin):
|
|||
resource_type = models.CharField(max_length=64, verbose_name=_("Resource Type"))
|
||||
resource = models.CharField(max_length=128, verbose_name=_("Resource"))
|
||||
resource_id = models.CharField(
|
||||
max_length=36, blank=True, default='', db_index=True,
|
||||
max_length=128, blank=True, default='', db_index=True,
|
||||
verbose_name=_("Resource")
|
||||
)
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
|
|
|
@ -27,9 +27,8 @@ from common.signals import django_ready
|
|||
from common.utils import get_request_ip, get_logger, get_syslogger
|
||||
from common.utils.encode import data_to_json
|
||||
from jumpserver.utils import current_request
|
||||
from terminal.backends.command.serializers import SessionCommandSerializer
|
||||
from terminal.models import Session, Command
|
||||
from terminal.serializers import SessionSerializer
|
||||
from terminal.serializers import SessionSerializer, SessionCommandSerializer
|
||||
from users.models import User
|
||||
from users.signals import post_user_change_password
|
||||
from . import models, serializers
|
||||
|
@ -124,8 +123,7 @@ def signal_of_operate_log_whether_continue(sender, instance, created, update_fie
|
|||
if instance._meta.object_name == 'Terminal' and created:
|
||||
condition = False
|
||||
# last_login 改变是最后登录日期, 每次登录都会改变
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
if instance._meta.object_name == 'User' and update_fields and 'last_login' in update_fields:
|
||||
condition = False
|
||||
# 不在记录白名单中,跳过
|
||||
if sender._meta.object_name not in MODELS_NEED_RECORD:
|
||||
|
@ -140,8 +138,12 @@ def on_object_pre_create_or_update(sender, instance=None, raw=False, using=None,
|
|||
)
|
||||
if not ok:
|
||||
return
|
||||
instance_before_data = {'id': instance.id}
|
||||
raw_instance = type(instance).objects.filter(pk=instance.id).first()
|
||||
|
||||
# users.PrivateToken Model 没有 id 有 pk字段
|
||||
instance_id = getattr(instance, 'id', getattr(instance, 'pk', None))
|
||||
instance_before_data = {'id': instance_id}
|
||||
raw_instance = type(instance).objects.filter(pk=instance_id).first()
|
||||
|
||||
if raw_instance:
|
||||
instance_before_data = model_to_dict(raw_instance)
|
||||
operate_log_id = str(uuid.uuid4())
|
||||
|
@ -297,7 +299,7 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
|
|||
}
|
||||
exclude_models = {
|
||||
'UserPasswordHistory', 'ContentType',
|
||||
'SiteMessage', 'SiteMessageUsers',
|
||||
'MessageContent', 'SiteMessage',
|
||||
'PlatformAutomation', 'PlatformProtocol', 'Protocol',
|
||||
'HistoricalAccount', 'GatheredUser', 'ApprovalRule',
|
||||
'BaseAutomation', 'CeleryTask', 'Command', 'JobAuditLog',
|
||||
|
|
|
@ -9,6 +9,7 @@ from ops.celery.decorator import (
|
|||
)
|
||||
from .models import UserLoginLog, OperateLog, FTPLog, ActivityLog
|
||||
from common.utils import get_log_keep_day
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
def clean_login_log_period():
|
||||
|
@ -39,8 +40,8 @@ def clean_ftp_log_period():
|
|||
FTPLog.objects.filter(date_start__lt=expired_day).delete()
|
||||
|
||||
|
||||
@register_as_period_task(interval=3600*24)
|
||||
@shared_task
|
||||
@register_as_period_task(interval=3600 * 24)
|
||||
@shared_task(verbose_name=_('Clean audits log'))
|
||||
def clean_audits_log_period():
|
||||
clean_login_log_period()
|
||||
clean_operation_log_period()
|
||||
|
|
|
@ -1,20 +1,17 @@
|
|||
import csv
|
||||
import codecs
|
||||
|
||||
import csv
|
||||
from itertools import chain
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.db import models
|
||||
from django.http import HttpResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.utils import validate_ip, get_ip_city, get_logger
|
||||
from audits.const import ActivityChoices
|
||||
from settings.serializers import SettingsSerializer
|
||||
from common.utils import validate_ip, get_ip_city, get_logger
|
||||
from common.db import fields
|
||||
from .const import DEFAULT_CITY
|
||||
from .signals import post_activity_log
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
|
@ -110,7 +107,7 @@ def _get_instance_field_value(
|
|||
|
||||
|
||||
def model_to_dict_for_operate_log(
|
||||
instance, include_model_fields=True, include_related_fields=True
|
||||
instance, include_model_fields=True, include_related_fields=False
|
||||
):
|
||||
model_need_continue_fields = ['date_updated']
|
||||
m2m_need_continue_fields = ['history_passwords']
|
||||
|
@ -121,7 +118,7 @@ def model_to_dict_for_operate_log(
|
|||
|
||||
if include_related_fields:
|
||||
opts = instance._meta
|
||||
for f in chain(opts.many_to_many, opts.related_objects):
|
||||
for f in opts.many_to_many:
|
||||
value = []
|
||||
if instance.pk is not None:
|
||||
related_name = getattr(f, 'attname', '') or getattr(f, 'related_name', '')
|
||||
|
|
|
@ -15,10 +15,10 @@ from rest_framework.response import Response
|
|||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from common.api import JMSModelViewSet
|
||||
from common.utils.http import is_true
|
||||
from common.exceptions import JMSException
|
||||
from common.utils import random_string
|
||||
from common.utils.django import get_request_os
|
||||
from common.exceptions import JMSException
|
||||
from common.utils.http import is_true
|
||||
from orgs.mixins.api import RootOrgViewMixin
|
||||
from perms.models import ActionChoices
|
||||
from terminal.connect_methods import NativeClient, ConnectMethodUtil
|
||||
|
@ -264,7 +264,7 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
|
|||
msg = _('Account not found')
|
||||
raise JMSException(code='perm_account_invalid', detail=msg)
|
||||
if account.date_expired < timezone.now():
|
||||
msg = _('Permission Expired')
|
||||
msg = _('Permission expired')
|
||||
raise JMSException(code='perm_expired', detail=msg)
|
||||
return account
|
||||
|
||||
|
|
|
@ -31,7 +31,8 @@ class _ConnectionTokenAssetSerializer(serializers.ModelSerializer):
|
|||
model = Asset
|
||||
fields = [
|
||||
'id', 'name', 'address', 'protocols',
|
||||
'category', 'type', 'org_id', 'specific'
|
||||
'category', 'type', 'org_id', 'spec_info',
|
||||
'secret_info',
|
||||
]
|
||||
|
||||
|
||||
|
|
|
@ -5,9 +5,10 @@ from celery import shared_task
|
|||
from ops.celery.decorator import register_as_period_task
|
||||
from django.contrib.sessions.models import Session
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
@register_as_period_task(interval=3600*24)
|
||||
@shared_task
|
||||
@register_as_period_task(interval=3600 * 24)
|
||||
@shared_task(verbose_name=_('Clean expired session'))
|
||||
def clean_django_sessions():
|
||||
Session.objects.filter(expire_date__lt=timezone.now()).delete()
|
||||
|
|
|
@ -18,6 +18,7 @@ class CeleryBaseService(BaseService):
|
|||
os.environ.setdefault('ANSIBLE_FORCE_COLOR', 'True')
|
||||
os.environ.setdefault('ANSIBLE_CONFIG', ansible_config_path)
|
||||
os.environ.setdefault('ANSIBLE_LIBRARY', ansible_modules_path)
|
||||
os.environ.setdefault('PYTHONPATH', settings.APPS_DIR)
|
||||
|
||||
if os.getuid() == 0:
|
||||
os.environ.setdefault('C_FORCE_ROOT', '1')
|
||||
|
|
|
@ -1,15 +1,13 @@
|
|||
|
||||
from rest_framework import serializers
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework_bulk.serializers import BulkListSerializer
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_writable_nested.serializers import WritableNestedModelSerializer as NestedModelSerializer
|
||||
from rest_framework import serializers
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework_bulk.serializers import BulkListSerializer
|
||||
|
||||
from .mixin import BulkListSerializerMixin, BulkSerializerMixin
|
||||
|
||||
|
||||
__all__ = [
|
||||
'MethodSerializer', 'EmptySerializer', 'BulkModelSerializer',
|
||||
'AdaptedBulkListSerializer', 'CeleryTaskExecutionSerializer',
|
||||
|
|
|
@ -66,7 +66,7 @@ class LabeledChoiceField(ChoiceField):

def to_internal_value(self, data):
if isinstance(data, dict):
return data.get("value")
data = data.get("value")
return super(LabeledChoiceField, self).to_internal_value(data)
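
Note (not part of the diff): a short sketch of what the LabeledChoiceField change above means for callers; the field instance and choices are hypothetical:

    field = LabeledChoiceField(choices=[('sudo', 'sudo su -'), ('su', 'su - ')])
    field.to_internal_value('sudo')                                   # -> 'sudo'
    # Previously a dict short-circuited validation; now its 'value' is unwrapped
    # and still passed through the parent ChoiceField validation.
    field.to_internal_value({'value': 'sudo', 'label': 'sudo su -'})  # -> 'sudo'
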
@ -1,4 +1,4 @@
|
|||
from collections import Iterable
|
||||
from collections import Iterable, defaultdict
|
||||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models import NOT_PROVIDED
|
||||
|
@ -362,7 +362,7 @@ class CommonModelSerializer(CommonSerializerMixin, serializers.ModelSerializer):
|
|||
|
||||
|
||||
class CommonBulkSerializerMixin(BulkSerializerMixin, CommonSerializerMixin):
|
||||
pass
|
||||
_save_kwargs = defaultdict(dict)
|
||||
|
||||
|
||||
class CommonBulkModelSerializer(CommonBulkSerializerMixin, serializers.ModelSerializer):
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import re
|
||||
import os
|
||||
import logging
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.signals import request_finished
|
||||
from django.db import connection
|
||||
from django.db.models.signals import pre_save
|
||||
from django.dispatch import receiver
|
||||
|
||||
from jumpserver.utils import get_current_request
|
||||
|
||||
from .local import thread_local
|
||||
|
||||
pattern = re.compile(r'FROM `(\w+)`')
|
||||
|
@ -83,6 +84,36 @@ def on_request_finished_release_local(sender, **kwargs):
|
|||
thread_local.__release_local__()
|
||||
|
||||
|
||||
def _get_request_user_name():
|
||||
user_name = 'System'
|
||||
current_request = get_current_request()
|
||||
if current_request and current_request.user.is_authenticated:
|
||||
user_name = current_request.user.name
|
||||
if isinstance(user_name, str):
|
||||
user_name = user_name[:30]
|
||||
return user_name
|
||||
|
||||
|
||||
@receiver(pre_save)
|
||||
def on_create_set_created_by(sender, instance=None, **kwargs):
|
||||
if getattr(instance, '_ignore_auto_created_by', False):
|
||||
return
|
||||
if not hasattr(instance, 'created_by') or instance.created_by:
|
||||
return
|
||||
user_name = _get_request_user_name()
|
||||
instance.created_by = user_name
|
||||
|
||||
|
||||
@receiver(pre_save)
|
||||
def on_update_set_updated_by(sender, instance=None, created=False, **kwargs):
|
||||
if getattr(instance, '_ignore_auto_updated_by', False):
|
||||
return
|
||||
if not hasattr(instance, 'updated_by'):
|
||||
return
|
||||
user_name = _get_request_user_name()
|
||||
instance.updated_by = user_name
|
||||
|
||||
|
||||
if settings.DEBUG_DEV:
|
||||
request_finished.connect(on_request_finished_logging_db_query)
|
||||
else:
|
||||
|
|
|
@ -8,12 +8,12 @@ from common.sdk.sms.endpoint import SMS
|
|||
from common.exceptions import JMSException
|
||||
from common.utils.random import random_string
|
||||
from common.utils import get_logger
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Send email'))
|
||||
def send_async(sender):
|
||||
sender.gen_and_send()
|
||||
|
||||
|
|
|
@ -66,11 +66,11 @@ class RecordViewLogMixin:
|
|||
|
||||
def list(self, request, *args, **kwargs):
|
||||
response = super().list(request, *args, **kwargs)
|
||||
resource = self.get_resource_display(request)
|
||||
resource_display = self.get_resource_display(request)
|
||||
resource_type = self.model._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
self.ACTION, resource_type, force=True,
|
||||
resource=resource
|
||||
resource_display=resource_display
|
||||
)
|
||||
return response
|
||||
|
||||
|
@ -78,7 +78,6 @@ class RecordViewLogMixin:
|
|||
response = super().retrieve(request, *args, **kwargs)
|
||||
resource_type = self.model._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
self.ACTION, resource_type, force=True,
|
||||
resource=self.get_object()
|
||||
self.ACTION, resource_type, force=True, resource=self.get_object()
|
||||
)
|
||||
return response
|
||||
|
|
|
@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb850ffd130e7cad2ea8c186f94a059c6a882dd1526f7a4c4a16d2fea2a1815b
size 119290
oid sha256:7e35d73f8576a0ea30a0da3886b24033f61f1019f6e15466d7b5904b5dd15ef9
size 136075

File diff suppressed because it is too large
|
@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4af8f2ead4a9d5aaf943efea76305d8cad1ff0692758d21a93937601c6f150fd
size 105736
oid sha256:1d3093d239e72a1ab35464fcdebd157330dbde7ae1cfd0f89a7d75c52eade900
size 111883

File diff suppressed because it is too large
|
@ -1,17 +1,16 @@
|
|||
from rest_framework.response import Response
|
||||
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin
|
||||
from rest_framework.response import Response
|
||||
|
||||
from common.utils.http import is_true
|
||||
from common.permissions import IsValidUser
|
||||
from common.const.http import GET, PATCH, POST
|
||||
from common.api import JMSGenericViewSet
|
||||
from common.const.http import GET, PATCH, POST
|
||||
from common.permissions import IsValidUser
|
||||
from common.utils.http import is_true
|
||||
from ..serializers import (
|
||||
SiteMessageDetailSerializer, SiteMessageIdsSerializer,
|
||||
SiteMessageSerializer, SiteMessageIdsSerializer,
|
||||
SiteMessageSendSerializer,
|
||||
)
|
||||
from ..site_msg import SiteMessageUtil
|
||||
from ..filters import SiteMsgFilter
|
||||
|
||||
__all__ = ('SiteMessageViewSet',)
|
||||
|
||||
|
@ -19,11 +18,11 @@ __all__ = ('SiteMessageViewSet',)
|
|||
class SiteMessageViewSet(ListModelMixin, RetrieveModelMixin, JMSGenericViewSet):
|
||||
permission_classes = (IsValidUser,)
|
||||
serializer_classes = {
|
||||
'default': SiteMessageDetailSerializer,
|
||||
'default': SiteMessageSerializer,
|
||||
'mark_as_read': SiteMessageIdsSerializer,
|
||||
'send': SiteMessageSendSerializer,
|
||||
}
|
||||
filterset_class = SiteMsgFilter
|
||||
filterset_fields = ('has_read',)
|
||||
|
||||
def get_queryset(self):
|
||||
user = self.request.user
|
||||
|
@ -44,9 +43,9 @@ class SiteMessageViewSet(ListModelMixin, RetrieveModelMixin, JMSGenericViewSet):
|
|||
@action(methods=[PATCH], detail=False, url_path='mark-as-read')
|
||||
def mark_as_read(self, request, **kwargs):
|
||||
user = request.user
|
||||
seri = self.get_serializer(data=request.data)
|
||||
seri.is_valid(raise_exception=True)
|
||||
ids = seri.validated_data['ids']
|
||||
s = self.get_serializer(data=request.data)
|
||||
s.is_valid(raise_exception=True)
|
||||
ids = s.validated_data['ids']
|
||||
SiteMessageUtil.mark_msgs_as_read(user.id, ids)
|
||||
return Response({'detail': 'ok'})
|
||||
|
||||
|
@ -58,7 +57,7 @@ class SiteMessageViewSet(ListModelMixin, RetrieveModelMixin, JMSGenericViewSet):
|
|||
|
||||
@action(methods=[POST], detail=False)
|
||||
def send(self, request, **kwargs):
|
||||
seri = self.get_serializer(data=request.data)
|
||||
seri.is_valid(raise_exception=True)
|
||||
SiteMessageUtil.send_msg(**seri.validated_data, sender=request.user)
|
||||
s = self.get_serializer(data=request.data)
|
||||
s.is_valid(raise_exception=True)
|
||||
SiteMessageUtil.send_msg(**s.validated_data, sender=request.user)
|
||||
return Response({'detail': 'ok'})
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import django_filters
|
||||
|
||||
from common.drf.filters import BaseFilterSet
|
||||
from .models import SiteMessage
|
||||
from .models import MessageContent
|
||||
|
||||
|
||||
class SiteMsgFilter(BaseFilterSet):
|
||||
|
@ -14,5 +14,5 @@ class SiteMsgFilter(BaseFilterSet):
|
|||
has_read = django_filters.BooleanFilter(method='do_nothing')
|
||||
|
||||
class Meta:
|
||||
model = SiteMessage
|
||||
model = MessageContent
|
||||
fields = ('has_read',)
|
||||
|
|
|
@ -0,0 +1,72 @@
|
|||
# Generated by Django 3.2.14 on 2023-02-01 08:14
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('users', '0041_auto_20221220_1956'),
|
||||
('notifications', '0003_auto_20221220_1956'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='sitemessageusers',
|
||||
name='sitemessage',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='sitemessageusers',
|
||||
name='user',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='SiteMessage',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='SiteMessageUsers',
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='MessageContent',
|
||||
fields=[
|
||||
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
|
||||
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('subject', models.CharField(max_length=1024)),
|
||||
('message', models.TextField()),
|
||||
('is_broadcast', models.BooleanField(default=False)),
|
||||
('groups', models.ManyToManyField(to='users.UserGroup')),
|
||||
('sender', models.ForeignKey(db_constraint=False, default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='send_site_message', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SiteMessage',
|
||||
fields=[
|
||||
('created_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Created by')),
|
||||
('updated_by', models.CharField(blank=True, max_length=128, null=True, verbose_name='Updated by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('has_read', models.BooleanField(default=False)),
|
||||
('read_at', models.DateTimeField(default=None, null=True)),
|
||||
('content', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, related_name='messages', to='notifications.messagecontent')),
|
||||
('user', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='messagecontent',
|
||||
name='users',
|
||||
field=models.ManyToManyField(related_name='recv_site_messages', through='notifications.SiteMessage', to=settings.AUTH_USER_MODEL),
|
||||
),
|
||||
]
|
|
@ -2,24 +2,24 @@ from django.db import models

from common.db.models import JMSBaseModel

__all__ = ('SiteMessageUsers', 'SiteMessage')
__all__ = ('SiteMessage', 'MessageContent')


class SiteMessageUsers(JMSBaseModel):
sitemessage = models.ForeignKey('notifications.SiteMessage', on_delete=models.CASCADE, db_constraint=False,
related_name='m2m_sitemessageusers')
user = models.ForeignKey('users.User', on_delete=models.CASCADE, db_constraint=False,
related_name='m2m_sitemessageusers')
class SiteMessage(JMSBaseModel):
content = models.ForeignKey('notifications.MessageContent', on_delete=models.CASCADE,
db_constraint=False, related_name='messages')
user = models.ForeignKey('users.User', on_delete=models.CASCADE, db_constraint=False)
has_read = models.BooleanField(default=False)
read_at = models.DateTimeField(default=None, null=True)
comment = ''


class SiteMessage(JMSBaseModel):
class MessageContent(JMSBaseModel):
subject = models.CharField(max_length=1024)
message = models.TextField()
users = models.ManyToManyField(
'users.User', through=SiteMessageUsers, related_name='recv_site_messages'
'users.User', through=SiteMessage,
related_name='recv_site_messages'
)
groups = models.ManyToManyField('users.UserGroup')
is_broadcast = models.BooleanField(default=False)
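
Note (not part of the commit): with the restructuring above, MessageContent holds the message body while SiteMessage is the per-user through row carrying the read state. A hypothetical query sketch:

    # some_user is a placeholder; 'content' is the FK to MessageContent.
    unread = SiteMessage.objects.filter(user=some_user, has_read=False).select_related('content')
    for msg in unread:
        print(msg.content.subject, msg.date_created)
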
@ -1,7 +1,7 @@
|
|||
from rest_framework.serializers import ModelSerializer
|
||||
from rest_framework import serializers
|
||||
from rest_framework.serializers import ModelSerializer
|
||||
|
||||
from ..models import SiteMessage
|
||||
from ..models import MessageContent
|
||||
|
||||
|
||||
class SenderMixin(ModelSerializer):
|
||||
|
@ -15,12 +15,23 @@ class SenderMixin(ModelSerializer):
|
|||
return ''
|
||||
|
||||
|
||||
class SiteMessageDetailSerializer(SenderMixin, ModelSerializer):
|
||||
class MessageContentSerializer(SenderMixin, ModelSerializer):
|
||||
class Meta:
|
||||
model = SiteMessage
|
||||
model = MessageContent
|
||||
fields = [
|
||||
'id', 'subject', 'message', 'has_read', 'read_at',
|
||||
'date_created', 'date_updated', 'sender',
|
||||
'id', 'subject', 'message',
|
||||
'date_created', 'date_updated',
|
||||
'sender',
|
||||
]
|
||||
|
||||
|
||||
class SiteMessageSerializer(SenderMixin, ModelSerializer):
|
||||
content = MessageContentSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = MessageContent
|
||||
fields = [
|
||||
'id', 'has_read', 'read_at', 'content', 'date_created'
|
||||
]
|
||||
|
||||
|
||||
|
|
|
@ -12,7 +12,7 @@ from common.utils import get_logger
|
|||
from common.utils.connection import RedisPubSub
|
||||
from notifications.backends import BACKEND
|
||||
from users.models import User
|
||||
from .models import SiteMessage, SystemMsgSubscription, UserMsgSubscription
|
||||
from .models import MessageContent, SystemMsgSubscription, UserMsgSubscription
|
||||
from .notifications import SystemMessage
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
@ -26,7 +26,7 @@ class NewSiteMsgSubPub(LazyObject):
|
|||
new_site_msg_chan = NewSiteMsgSubPub()
|
||||
|
||||
|
||||
@receiver(post_save, sender=SiteMessage)
|
||||
@receiver(post_save, sender=MessageContent)
|
||||
@on_transaction_commit
|
||||
def on_site_message_create(sender, instance, created, **kwargs):
|
||||
if not created:
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
from django.db.models import F, Q
|
||||
from django.db import transaction
|
||||
|
||||
from common.utils.timezone import local_now
|
||||
from common.utils import get_logger
|
||||
from common.utils.timezone import local_now
|
||||
from users.models import User
|
||||
from .models import SiteMessage as SiteMessageModel, SiteMessageUsers
|
||||
from .models import MessageContent as SiteMessageModel, SiteMessage
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
@ -17,11 +16,6 @@ class SiteMessageUtil:
|
|||
if not any((user_ids, group_ids, is_broadcast)):
|
||||
raise ValueError('No recipient is specified')
|
||||
|
||||
logger.info(f'Site message send: '
|
||||
f'user_ids={user_ids} '
|
||||
f'group_ids={group_ids} '
|
||||
f'subject={subject} '
|
||||
f'message={message}')
|
||||
with transaction.atomic():
|
||||
site_msg = SiteMessageModel.objects.create(
|
||||
subject=subject, message=message,
|
||||
|
@ -30,65 +24,46 @@ class SiteMessageUtil:
|
|||
|
||||
if is_broadcast:
|
||||
user_ids = User.objects.all().values_list('id', flat=True)
|
||||
else:
|
||||
if group_ids:
|
||||
site_msg.groups.add(*group_ids)
|
||||
elif group_ids:
|
||||
site_msg.groups.add(*group_ids)
|
||||
|
||||
user_ids_from_group = User.groups.through.objects.filter(
|
||||
usergroup_id__in=group_ids
|
||||
).values_list('user_id', flat=True)
|
||||
user_ids = [*user_ids, *user_ids_from_group]
|
||||
user_ids_from_group = User.groups.through.objects.filter(
|
||||
usergroup_id__in=group_ids
|
||||
).values_list('user_id', flat=True)
|
||||
user_ids = [*user_ids, *user_ids_from_group]
|
||||
|
||||
site_msg.users.add(*user_ids)
|
||||
|
||||
@classmethod
|
||||
def get_user_all_msgs(cls, user_id):
|
||||
site_msgs = SiteMessageModel.objects.filter(
|
||||
m2m_sitemessageusers__user_id=user_id
|
||||
).distinct().annotate(
|
||||
has_read=F('m2m_sitemessageusers__has_read'),
|
||||
read_at=F('m2m_sitemessageusers__read_at')
|
||||
).order_by('-date_created')
|
||||
|
||||
return site_msgs
|
||||
site_msg_rels = SiteMessage.objects \
|
||||
.filter(user=user_id) \
|
||||
.prefetch_related('content') \
|
||||
.order_by('-date_created')
|
||||
return site_msg_rels
|
||||
|
||||
@classmethod
|
||||
def get_user_all_msgs_count(cls, user_id):
|
||||
site_msgs_count = SiteMessageModel.objects.filter(
|
||||
m2m_sitemessageusers__user_id=user_id
|
||||
site_msgs_count = SiteMessage.objects.filter(
|
||||
user_id=user_id
|
||||
).distinct().count()
|
||||
return site_msgs_count
|
||||
|
||||
@classmethod
|
||||
def filter_user_msgs(cls, user_id, has_read=False):
|
||||
site_msgs = SiteMessageModel.objects.filter(
|
||||
m2m_sitemessageusers__user_id=user_id,
|
||||
m2m_sitemessageusers__has_read=has_read
|
||||
).distinct().annotate(
|
||||
has_read=F('m2m_sitemessageusers__has_read'),
|
||||
read_at=F('m2m_sitemessageusers__read_at')
|
||||
).order_by('-date_created')
|
||||
|
||||
return site_msgs
|
||||
return cls.get_user_all_msgs(user_id).filter(has_read=has_read)
|
||||
|
||||
@classmethod
|
||||
def get_user_unread_msgs_count(cls, user_id):
|
||||
site_msgs_count = SiteMessageModel.objects.filter(
|
||||
m2m_sitemessageusers__user_id=user_id,
|
||||
m2m_sitemessageusers__has_read=False
|
||||
).distinct().count()
|
||||
site_msgs_count = SiteMessage.objects \
|
||||
.filter(user=user_id, has_read=False) \
|
||||
.values_list('content', flat=True) \
|
||||
.distinct().count()
|
||||
return site_msgs_count
|
||||
|
||||
@classmethod
|
||||
def mark_msgs_as_read(cls, user_id, msg_ids=None):
|
||||
q = Q(user_id=user_id) & Q(has_read=False)
|
||||
if msg_ids is not None:
|
||||
q &= Q(sitemessage_id__in=msg_ids)
|
||||
site_msg_users = SiteMessageUsers.objects.filter(q)
|
||||
|
||||
for site_msg_user in site_msg_users:
|
||||
site_msg_user.has_read = True
|
||||
site_msg_user.read_at = local_now()
|
||||
|
||||
SiteMessageUsers.objects.bulk_update(
|
||||
site_msg_users, fields=('has_read', 'read_at'))
|
||||
site_msgs = SiteMessage.objects.filter(user_id=user_id)
|
||||
if msg_ids:
|
||||
site_msgs = site_msgs.filter(id__in=msg_ids)
|
||||
site_msgs.update(has_read=True, read_at=local_now())
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
|
||||
from rest_framework_bulk.routes import BulkRouter
|
||||
from django.urls import path
|
||||
from django.conf import settings
|
||||
from django.urls import path
|
||||
from rest_framework_bulk.routes import BulkRouter
|
||||
|
||||
from notifications import api
|
||||
|
||||
|
@ -10,11 +9,12 @@ app_name = 'notifications'
|
|||
router = BulkRouter()
|
||||
router.register('system-msg-subscription', api.SystemMsgSubscriptionViewSet, 'system-msg-subscription')
|
||||
router.register('user-msg-subscription', api.UserMsgSubscriptionViewSet, 'user-msg-subscription')
|
||||
router.register('site-message', api.SiteMessageViewSet, 'site-message')
|
||||
router.register('site-messages', api.SiteMessageViewSet, 'site-message')
|
||||
|
||||
urlpatterns = [
|
||||
path('backends/', api.BackendListView.as_view(), name='backends')
|
||||
] + router.urls
|
||||
]
|
||||
urlpatterns += router.urls
|
||||
|
||||
if settings.DEBUG:
|
||||
urlpatterns += [
|
||||
|
|
|
@ -105,7 +105,7 @@ class JMSInventory:
'id': str(asset.id), 'name': asset.name, 'address': asset.address,
'type': asset.type, 'category': asset.category,
'protocol': asset.protocol, 'port': asset.port,
'specific': asset.specific,
'spec_info': asset.spec_info, 'secret_info': asset.secret_info,
'protocols': [{'name': p.name, 'port': p.port} for p in protocols],
},
'jms_account': {

@ -1,31 +1,26 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from celery.result import AsyncResult
|
||||
from rest_framework import generics, viewsets, mixins
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import ugettext as _
|
||||
from rest_framework import viewsets
|
||||
from celery.result import AsyncResult
|
||||
from rest_framework import generics
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
|
||||
from common.permissions import IsValidUser
|
||||
from common.api import LogTailApi
|
||||
from common.api import LogTailApi, CommonApiMixin
|
||||
from ..models import CeleryTaskExecution, CeleryTask
|
||||
from ..serializers import CeleryResultSerializer, CeleryPeriodTaskSerializer
|
||||
from ..celery.utils import get_celery_task_log_path
|
||||
from ..ansible.utils import get_ansible_task_log_path
|
||||
from common.api import CommonApiMixin
|
||||
from ..serializers import CeleryResultSerializer, CeleryPeriodTaskSerializer
|
||||
from ..serializers.celery import CeleryTaskSerializer, CeleryTaskExecutionSerializer
|
||||
|
||||
__all__ = [
|
||||
'CeleryTaskExecutionLogApi', 'CeleryResultApi', 'CeleryPeriodTaskViewSet',
|
||||
'AnsibleTaskLogApi', 'CeleryTaskViewSet', 'CeleryTaskExecutionViewSet'
|
||||
]
|
||||
|
||||
from ..serializers.celery import CeleryTaskSerializer, CeleryTaskExecutionSerializer
|
||||
|
||||
|
||||
class CeleryTaskExecutionLogApi(LogTailApi):
|
||||
permission_classes = (IsValidUser,)
|
||||
|
@ -103,9 +98,12 @@ class CelerySummaryAPIView(generics.RetrieveAPIView):
|
|||
pass
|
||||
|
||||
|
||||
class CeleryTaskViewSet(CommonApiMixin, viewsets.ReadOnlyModelViewSet):
|
||||
class CeleryTaskViewSet(
|
||||
CommonApiMixin, mixins.RetrieveModelMixin,
|
||||
mixins.ListModelMixin, mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet
|
||||
):
|
||||
serializer_class = CeleryTaskSerializer
|
||||
http_method_names = ('get', 'head', 'options',)
|
||||
|
||||
def get_queryset(self):
|
||||
return CeleryTask.objects.exclude(name__startswith='celery')
|
||||
|
|
|
@ -1,13 +1,19 @@
|
|||
import os
|
||||
import shutil
|
||||
import zipfile
|
||||
|
||||
from django.conf import settings
|
||||
from django.shortcuts import get_object_or_404
|
||||
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from ..exception import PlaybookNoValidEntry
|
||||
from ..models import Playbook
|
||||
from ..serializers.playbook import PlaybookSerializer
|
||||
|
||||
__all__ = ["PlaybookViewSet"]
|
||||
__all__ = ["PlaybookViewSet", "PlaybookFileBrowserAPIView"]
|
||||
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
||||
def unzip_playbook(src, dist):
|
||||
|
@ -31,12 +37,180 @@ class PlaybookViewSet(OrgBulkModelViewSet):
|
|||
|
||||
def perform_create(self, serializer):
|
||||
instance = serializer.save()
|
||||
src_path = os.path.join(settings.MEDIA_ROOT, instance.path.name)
|
||||
dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
|
||||
unzip_playbook(src_path, dest_path)
|
||||
valid_entry = ('main.yml', 'main.yaml', 'main')
|
||||
for f in os.listdir(dest_path):
|
||||
if f in valid_entry:
|
||||
return
|
||||
os.remove(dest_path)
|
||||
raise PlaybookNoValidEntry
|
||||
if instance.create_method == 'blank':
|
||||
dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
|
||||
os.makedirs(dest_path)
|
||||
with open(os.path.join(dest_path, 'main.yml'), 'w') as f:
|
||||
f.write('## write your playbook here')
|
||||
|
||||
if instance.create_method == 'upload':
|
||||
src_path = os.path.join(settings.MEDIA_ROOT, instance.path.name)
|
||||
dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
|
||||
unzip_playbook(src_path, dest_path)
|
||||
valid_entry = ('main.yml', 'main.yaml', 'main')
|
||||
for f in os.listdir(dest_path):
|
||||
if f in valid_entry:
|
||||
return
|
||||
os.remove(dest_path)
|
||||
raise PlaybookNoValidEntry
|
||||
|
||||
|
||||
class PlaybookFileBrowserAPIView(APIView):
|
||||
    rbac_perms = ()
    permission_classes = ()

    def get(self, request, **kwargs):
        playbook_id = kwargs.get('pk')
        playbook = get_object_or_404(Playbook, id=playbook_id)
        work_path = playbook.work_dir
        file_key = request.query_params.get('key', '')
        if file_key:
            file_path = os.path.join(work_path, file_key)
            with open(file_path, 'r') as f:
                content = f.read()
            return Response({'content': content})
        else:
            expand_key = request.query_params.get('expand', '')
            nodes = self.generate_tree(playbook, work_path, expand_key)
            return Response(nodes)

    def post(self, request, **kwargs):
        playbook_id = kwargs.get('pk')
        playbook = get_object_or_404(Playbook, id=playbook_id)
        work_path = playbook.work_dir

        parent_key = request.data.get('key', '')
        if parent_key == 'root':
            parent_key = ''
        if os.path.dirname(parent_key) == 'root':
            parent_key = os.path.basename(parent_key)
        full_path = os.path.join(work_path, parent_key)

        is_directory = request.data.get('is_directory', False)
        content = request.data.get('content', '')
        name = request.data.get('name', '')

        def find_new_name(p, is_file=False):
            if not p:
                if is_file:
                    p = 'new_file.yml'
                else:
                    p = 'new_dir'
            np = os.path.join(full_path, p)
            n = 0
            while os.path.exists(np):
                n += 1
                np = os.path.join(full_path, '{}({})'.format(p, n))
            return np

        if is_directory:
            new_file_path = find_new_name(name)
            os.makedirs(new_file_path)
        else:
            new_file_path = find_new_name(name, True)
            with open(new_file_path, 'w') as f:
                f.write(content)

        relative_path = os.path.relpath(os.path.dirname(new_file_path), work_path)
        new_node = {
            "name": os.path.basename(new_file_path),
            "title": os.path.basename(new_file_path),
            "id": os.path.join(relative_path, os.path.basename(new_file_path))
            if not os.path.join(relative_path, os.path.basename(new_file_path)).startswith('.')
            else os.path.basename(new_file_path),
            "isParent": is_directory,
            "pId": relative_path if not relative_path.startswith('.') else 'root',
            "open": True,
        }
        if not is_directory:
            new_node['iconSkin'] = 'file'
        return Response(new_node)

    def patch(self, request, **kwargs):
        playbook_id = kwargs.get('pk')
        playbook = get_object_or_404(Playbook, id=playbook_id)
        work_path = playbook.work_dir

        file_key = request.data.get('key', '')
        if os.path.dirname(file_key) == 'root':
            file_key = os.path.basename(file_key)

        new_name = request.data.get('new_name', '')
        content = request.data.get('content', '')
        is_directory = request.data.get('is_directory', False)

        if not file_key or file_key == 'root':
            return Response(status=400)
        file_path = os.path.join(work_path, file_key)

        if new_name:
            new_file_path = os.path.join(os.path.dirname(file_path), new_name)
            os.rename(file_path, new_file_path)
            file_path = new_file_path

        if not is_directory and content:
            with open(file_path, 'w') as f:
                f.write(content)
        return Response({'msg': 'ok'})

    def delete(self, request, **kwargs):
        not_delete_allowed = ['root', 'main.yml']
        playbook_id = kwargs.get('pk')
        playbook = get_object_or_404(Playbook, id=playbook_id)
        work_path = playbook.work_dir
        file_key = request.query_params.get('key', '')
        if not file_key:
            return Response(status=400)
        if file_key in not_delete_allowed:
            return Response(status=400)
        file_path = os.path.join(work_path, file_key)
        if os.path.isdir(file_path):
            shutil.rmtree(file_path)
        else:
            os.remove(file_path)
        return Response({'msg': 'ok'})

    @staticmethod
    def generate_tree(playbook, root_path, expand_key=None):
        nodes = [{
            "name": playbook.name,
            "title": playbook.name,
            "id": 'root',
            "isParent": True,
            "open": True,
            "pId": '',
            "temp": False
        }]
        for path, dirs, files in os.walk(root_path):
            dirs.sort()
            files.sort()

            relative_path = os.path.relpath(path, root_path)
            for d in dirs:
                node = {
                    "name": d,
                    "title": d,
                    "id": os.path.join(relative_path, d) if not os.path.join(relative_path, d).startswith(
                        '.') else d,
                    "isParent": True,
                    "open": True,
                    "pId": relative_path if not relative_path.startswith('.') else 'root',
                    "temp": False
                }
                if expand_key == node['id']:
                    node['open'] = True
                nodes.append(node)
            for f in files:
                node = {
                    "name": f,
                    "title": f,
                    "iconSkin": 'file',
                    "id": os.path.join(relative_path, f) if not os.path.join(relative_path, f).startswith(
                        '.') else f,
                    "isParent": False,
                    "open": False,
                    "pId": relative_path if not relative_path.startswith('.') else 'root',
                    "temp": False
                }
                nodes.append(node)
        return nodes
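The view above amounts to a small file-browser API over a playbook's working directory: GET without `key` returns the tree node list built by `generate_tree()`, GET with `key` returns one file's content, POST creates a file or directory, PATCH renames or rewrites, and DELETE removes anything except `root` and `main.yml`. A minimal client sketch, assuming the `playbook/<uuid:pk>/file/` route registered later in this diff is mounted under the usual `/api/v1/ops/` prefix; the base URL, token and playbook id below are illustrative assumptions, not part of the change:

import requests

# Illustrative values only; replace with a real host, API token and playbook id.
BASE = "https://jms.example.com/api/v1/ops"
PLAYBOOK_ID = "9b3b1c9e-0000-0000-0000-000000000000"
session = requests.Session()
session.headers["Authorization"] = "Token <api-token>"
url = f"{BASE}/playbook/{PLAYBOOK_ID}/file/"

# GET without `key`: node list built by generate_tree()
tree = session.get(url).json()

# GET with `key`: content of a single file, relative to the playbook work_dir
main_yml = session.get(url, params={"key": "main.yml"}).json()["content"]

# POST: create a file (or a directory when is_directory=True) under `key`;
# 'root' maps to the work_dir itself
new_node = session.post(url, json={
    "key": "root",
    "name": "tasks.yml",
    "is_directory": False,
    "content": "- debug: msg=hello\n",
}).json()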
@ -29,6 +29,11 @@ DEFAULT_PASSWORD_RULES = {
}


class CreateMethods(models.TextChoices):
    blank = 'blank', _('Blank')
    vcs = 'vcs', _('VCS')


class Types(models.TextChoices):
    adhoc = 'adhoc', _('Adhoc')
    playbook = 'playbook', _('Playbook')
@ -0,0 +1,23 @@
# Generated by Django 3.2.14 on 2023-01-17 03:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ops', '0024_alter_celerytask_date_last_publish'),
    ]

    operations = [
        migrations.AddField(
            model_name='playbook',
            name='create_method',
            field=models.CharField(choices=[('blank', 'Blank'), ('vcs', 'VCS')], default='blank', max_length=128, verbose_name='CreateMethod'),
        ),
        migrations.AddField(
            model_name='playbook',
            name='vcs_url',
            field=models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='VCS URL'),
        ),
    ]
@ -0,0 +1,19 @@
# Generated by Django 3.2.14 on 2023-02-03 08:40

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('ops', '0025_auto_20230117_1130'),
    ]

    operations = [
        migrations.AlterField(
            model_name='jobexecution',
            name='job',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='executions', to='ops.job'),
        ),
    ]
@ -1,4 +1,3 @@
import datetime
import json
import logging
import os
@ -97,7 +96,7 @@ class JobExecution(JMSOrgBaseModel):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    task_id = models.UUIDField(null=True)
    status = models.CharField(max_length=16, verbose_name=_('Status'), default=JobStatus.running)
    job = models.ForeignKey(Job, on_delete=models.CASCADE, related_name='executions', null=True)
    job = models.ForeignKey(Job, on_delete=models.SET_NULL, related_name='executions', null=True)
    job_version = models.IntegerField(default=0)
    parameters = models.JSONField(default=dict, verbose_name=_('Parameters'))
    result = models.JSONField(blank=True, null=True, verbose_name=_('Result'))
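Switching `JobExecution.job` from `on_delete=models.CASCADE` to `models.SET_NULL` (paired with the migration above) means deleting a Job no longer wipes its execution history. A rough sketch of the difference, assuming `Job` and `JobExecution` are importable from `ops.models` and that the creation arguments shown are enough for a throwaway record:

# Sketch only; creation arguments are illustrative.
from ops.models import Job, JobExecution

job = Job.objects.create(name="demo")
execution = JobExecution.objects.create(job=job)

job.delete()
execution.refresh_from_db()

# Old behaviour (CASCADE): the execution row would have been deleted with the job.
# New behaviour (SET_NULL + null=True): the history row survives with job unset.
assert execution.job is None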
@ -122,41 +121,42 @@ class JobExecution(JMSOrgBaseModel):

    @property
    def assent_result_detail(self):
        if self.is_finished and not self.summary.get('error', None):
            result = {
                "summary": self.count,
                "detail": [],
            }
        if not self.is_finished or self.summary.get('error'):
            return None
        result = {
            "summary": self.summary,
            "detail": [],
        }
        for asset in self.current_job.assets.all():
            asset_detail = {
                "name": asset.name,
                "status": "ok",
                "tasks": [],
            }
            if self.summary.get("excludes", None) and self.summary["excludes"].get(asset.name, None):
                asset_detail.update({"status": "excludes"})
                result["detail"].append(asset_detail)
                break
            if self.result["dark"].get(asset.name, None):
                asset_detail.update({"status": "failed"})
                for key, task in self.result["dark"][asset.name].items():
                    task_detail = {"name": key,
                                   "output": "{}{}".format(task.get("stdout", ""), task.get("stderr", ""))}
                    asset_detail["tasks"].append(task_detail)
            if self.result["failures"].get(asset.name, None):
                asset_detail.update({"status": "failed"})
                for key, task in self.result["failures"][asset.name].items():
                    task_detail = {"name": key,
                                   "output": "{}{}".format(task.get("stdout", ""), task.get("stderr", ""))}
                    asset_detail["tasks"].append(task_detail)

            if self.result["ok"].get(asset.name, None):
                for key, task in self.result["ok"][asset.name].items():
                    task_detail = {"name": key,
                                   "output": "{}{}".format(task.get("stdout", ""), task.get("stderr", ""))}
                    asset_detail["tasks"].append(task_detail)
            result["detail"].append(asset_detail)
        return result

    @property
    def job_type(self):
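After this rewrite, `assent_result_detail` returns `None` until the run is finished (or when the summary carries an error), and otherwise a dict whose `summary` now comes from `self.summary` rather than `self.count`. The shape is roughly the sketch below; the values are illustrative, not taken from a real run:

# Illustrative return value of assent_result_detail, not real output.
{
    "summary": {"ok": 1, "failures": 0, "dark": 0},
    "detail": [
        {
            "name": "web-server-01",          # asset name
            "status": "ok",                   # "ok" | "failed" | "excludes"
            "tasks": [
                {"name": "Gather Facts", "output": "<stdout + stderr>"},
            ],
        },
    ],
}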
@ -176,9 +176,7 @@ class JobExecution(JMSOrgBaseModel):

        shell = self.current_job.args
        if self.current_job.chdir:
            if module == self.current_job.module:
                shell += " path={}".format(self.current_job.chdir)
            else:
            if module == "shell":
                shell += " chdir={}".format(self.current_job.chdir)
        if self.current_job.module in ['python']:
            shell += " executable={}".format(self.current_job.module)
@ -5,6 +5,7 @@ from django.conf import settings
from django.db import models
from django.utils.translation import gettext_lazy as _

from ops.const import CreateMethods
from ops.exception import PlaybookNoValidEntry
from orgs.mixins.models import JMSOrgBaseModel
@ -15,12 +16,20 @@ class Playbook(JMSOrgBaseModel):
    path = models.FileField(upload_to='playbooks/')
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)
    create_method = models.CharField(max_length=128, choices=CreateMethods.choices, default=CreateMethods.blank,
                                     verbose_name=_('CreateMethod'))
    vcs_url = models.CharField(max_length=1024, default='', verbose_name=_('VCS URL'), null=True, blank=True)

    @property
    def entry(self):
        work_dir = os.path.join(settings.DATA_DIR, "ops", "playbook", self.id.__str__())
        work_dir = self.work_dir
        valid_entry = ('main.yml', 'main.yaml', 'main')
        for f in os.listdir(work_dir):
            if f in valid_entry:
                return os.path.join(work_dir, f)
        raise PlaybookNoValidEntry

    @property
    def work_dir(self):
        work_dir = os.path.join(settings.DATA_DIR, "ops", "playbook", self.id.__str__())
        return work_dir
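The new `work_dir` property centralizes the on-disk layout that `entry` previously computed inline: each playbook lives under `DATA_DIR/ops/playbook/<uuid>/` and must contain `main.yml`, `main.yaml` or `main`. A small sketch of how the two properties relate, with an assumed `DATA_DIR` and playbook id:

# Sketch: `playbook` is an existing Playbook instance; paths are illustrative.
# With settings.DATA_DIR = "/opt/jumpserver/data" and id = "9b3b1c9e-...":
playbook.work_dir   # -> "/opt/jumpserver/data/ops/playbook/9b3b1c9e-..."
playbook.entry      # -> ".../9b3b1c9e-.../main.yml", or raises PlaybookNoValidEntry
                    #    if none of main.yml / main.yaml / main exists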
@ -27,5 +27,5 @@ class PlaybookSerializer(BulkOrgResourceModelSerializer):
        model = Playbook
        read_only_fields = ["id", "date_created", "date_updated"]
        fields = read_only_fields + [
            "id", 'path', "name", "comment", "creator",
            "id", 'path', "name", "comment", "creator", 'create_method', 'vcs_url',
        ]
@ -23,6 +23,7 @@ router.register(r'tasks', api.CeleryTaskViewSet, 'task')
router.register(r'task-executions', api.CeleryTaskExecutionViewSet, 'task-executions')

urlpatterns = [
    path('playbook/<uuid:pk>/file/', api.PlaybookFileBrowserAPIView.as_view(), name='playbook-file'),
    path('variables/help/', api.JobRunVariableHelpAPIView.as_view(), name='variable-help'),
    path('job-execution/asset-detail/', api.JobAssetDetail.as_view(), name='asset-detail'),
    path('job-execution/task-detail/<uuid:task_id>/', api.JobExecutionTaskDetail.as_view(), name='task-detail'),
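With this route registered, the file-browser view from the first hunk is reachable by name. A small sketch, assuming the ops URL config is namespaced as `ops` (the namespace and the final mount prefix are assumptions, not shown in this hunk):

from django.urls import reverse

# Assumed namespace 'ops'; adjust to however the project includes these urls.
url = reverse('ops:playbook-file', kwargs={'pk': playbook.id})
# -> something like '/api/v1/ops/playbook/<uuid>/file/' depending on the mount prefix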
Some files were not shown because too many files have changed in this diff.