mirror of https://github.com/jumpserver/jumpserver
perf: merge with dev
commit 136bec94ca

@@ -1,14 +1,14 @@
 from copy import deepcopy

 from common.utils import get_logger
-from accounts.const import AutomationTypes, SecretType
+from accounts.const import SecretType
 from assets.automations.base.manager import BasePlaybookManager
 from accounts.automations.methods import platform_automation_methods

 logger = get_logger(__name__)


-class PushOrVerifyHostCallbackMixin:
+class VerifyHostCallbackMixin:
     execution: callable
     get_accounts: callable
     host_account_mapper: dict
@@ -11,12 +11,13 @@
     login_host: "{{ jms_asset.address }}"
     login_port: "{{ jms_asset.port }}"
     login_db: "{{ jms_asset.spec_info.db_name }}"
-  register: db_info
+  register: result
+  failed_when: not result.is_available

- name: Display PostgreSQL version
  debug:
-    var: db_info.server_version.full
-  when: db_info is succeeded
+    var: result.server_version.full
+  when: result is succeeded

- name: Change PostgreSQL password
  community.postgresql.postgresql_user:
@@ -27,7 +28,7 @@
     db: "{{ jms_asset.spec_info.db_name }}"
     name: "{{ account.username }}"
     password: "{{ account.secret }}"
-  when: db_info is succeeded
+  when: result is succeeded
   register: change_info

- name: Verify password
@@ -38,5 +39,7 @@
     login_port: "{{ jms_asset.port }}"
     db: "{{ jms_asset.spec_info.db_name }}"
   when:
-    - db_info is succeeded
+    - result is succeeded
     - change_info is succeeded
+  register: result
+  failed_when: not result.is_available
@@ -8,10 +8,18 @@
   # debug:
   #   msg: "Username: {{ account.username }}, Password: {{ account.secret }}"


+- name: Get groups of a Windows user
+  ansible.windows.win_user:
+    name: "{{ jms_account.username }}"
+  register: user_info

 - name: Change password
   ansible.windows.win_user:
     name: "{{ account.username }}"
     password: "{{ account.secret }}"
+    groups: "{{ user_info.groups[0].name }}"
+    groups_action: add
     update_password: always
   when: account.secret_type == "password"
@@ -22,6 +22,8 @@ logger = get_logger(__name__)


 class ChangeSecretManager(AccountBasePlaybookManager):
+    ansible_account_prefer = ''
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.method_hosts_mapper = defaultdict(list)
@@ -33,18 +35,12 @@ class ChangeSecretManager(AccountBasePlaybookManager):
             'ssh_key_change_strategy', SSHKeyStrategy.add
         )
         self.snapshot_account_usernames = self.execution.snapshot['accounts']
-        self._password_generated = None
-        self._ssh_key_generated = None
         self.name_recorder_mapper = {}  # keep a name-to-record mapping to simplify later processing

     @classmethod
     def method_type(cls):
         return AutomationTypes.change_secret

-    @lazyproperty
-    def related_accounts(self):
-        pass
-
     def get_kwargs(self, account, secret):
         kwargs = {}
         if self.secret_type != SecretType.SSH_KEY:
@@ -152,12 +148,16 @@ class ChangeSecretManager(AccountBasePlaybookManager):
     def on_runner_failed(self, runner, e):
         logger.error("Change secret error: ", e)

-    def run(self, *args, **kwargs):
+    def check_secret(self):
         if self.secret_strategy == SecretStrategy.custom \
                 and not self.execution.snapshot['secret']:
             print('Custom secret is empty')
-            return
+            return False
+        return True

+    def run(self, *args, **kwargs):
+        if not self.check_secret():
+            return
         super().run(*args, **kwargs)
         recorders = self.name_recorder_mapper.values()
         recorders = list(recorders)
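The refactor above splits the pre-flight validation out of run() so other managers can reuse it. A minimal, self-contained sketch of that guard pattern (class name, the 'custom' literal and the snapshot shape are simplified stand-ins, not the project's exact API):

    class ChangeSecretLikeManager:
        # Illustrates check_secret() as a reusable guard before run().
        def __init__(self, secret_strategy, snapshot):
            self.secret_strategy = secret_strategy
            self.snapshot = snapshot

        def check_secret(self):
            # A custom strategy combined with an empty secret is a configuration error.
            if self.secret_strategy == 'custom' and not self.snapshot.get('secret'):
                print('Custom secret is empty')
                return False
            return True

        def run(self):
            if not self.check_secret():
                return
            print('running playbook ...')

    ChangeSecretLikeManager('custom', {'secret': ''}).run()   # prints the warning and stops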
@@ -30,6 +30,10 @@ class GatherAccountsFilter:
         result = {}
         for line in info:
             data = line.split('@')
+            if len(data) == 1:
+                result[line] = {}
+                continue
+
             if len(data) != 3:
                 continue
             username, address, dt = data
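For context, the branch added above handles output lines that contain only a bare username (no '@'-separated login record). A small illustrative parser, assuming the 'user@address@datetime' line format produced by the gather task; the result shape is a guess, not the project's exact schema:

    def parse_gathered_accounts(lines):
        result = {}
        for line in lines:
            data = line.split('@')
            if len(data) == 1:      # user that has never logged in: bare username only
                result[line] = {}
                continue
            if len(data) != 3:      # malformed line, ignore it
                continue
            username, address, dt = data
            result[username] = {'address': address, 'login_at': dt}
        return result

    print(parse_gathered_accounts(['root@10.0.0.1@Mon Feb 6 10:00', 'backup']))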
@@ -4,8 +4,13 @@
 - name: Gather posix account
   ansible.builtin.shell:
     cmd: >
       users=$(getent passwd | grep -v nologin | grep -v shutdown | awk -F":" '{ print $1 }');for i in $users;
-      do last -w -F $i -1 | head -1 | grep -v ^$ | awk '{ print $1"@"$3"@"$5,$6,$7,$8 }';done
+      do k=$(last -w -F $i -1 | head -1 | grep -v ^$ | awk '{ print $1"@"$3"@"$5,$6,$7,$8 }')
+      if [ -n "$k" ]; then
+      echo $k
+      else
+      echo $i
+      fi;done
   register: result

 - name: Define info by set_fact
@@ -1,26 +1,24 @@
+from copy import deepcopy
+
 from django.db.models import QuerySet

 from common.utils import get_logger
-from accounts.const import AutomationTypes
 from accounts.models import Account
-from ..base.manager import PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager
+from accounts.const import AutomationTypes, SecretType
+from ..base.manager import AccountBasePlaybookManager
+from ..change_secret.manager import ChangeSecretManager

 logger = get_logger(__name__)


-class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.secret_type = self.execution.snapshot['secret_type']
-        self.host_account_mapper = {}
+class PushAccountManager(ChangeSecretManager, AccountBasePlaybookManager):
+    ansible_account_prefer = ''

     @classmethod
     def method_type(cls):
         return AutomationTypes.push_account

     def create_nonlocal_accounts(self, accounts, snapshot_account_usernames, asset):
-        secret = self.execution.snapshot['secret']
         secret_type = self.secret_type
         usernames = accounts.filter(secret_type=secret_type).values_list(
             'username', flat=True
@@ -29,7 +27,7 @@ class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):
         create_account_objs = [
             Account(
                 name=f'{username}-{secret_type}', username=username,
-                secret=secret, secret_type=secret_type, asset=asset,
+                secret_type=secret_type, asset=asset,
             )
             for username in create_usernames
         ]
@@ -50,6 +48,68 @@ class PushAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):
         )
         return accounts

+    def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
+        host = super(ChangeSecretManager, self).host_callback(
+            host, asset=asset, account=account, automation=automation,
+            path_dir=path_dir, **kwargs
+        )
+        if host.get('error'):
+            return host
+
+        accounts = asset.accounts.all()
+        accounts = self.get_accounts(account, accounts)
+
+        inventory_hosts = []
+        host['secret_type'] = self.secret_type
+        for account in accounts:
+            h = deepcopy(host)
+            h['name'] += '_' + account.username
+            new_secret = self.get_secret()
+
+            private_key_path = None
+            if self.secret_type == SecretType.SSH_KEY:
+                private_key_path = self.generate_private_key_path(new_secret, path_dir)
+                new_secret = self.generate_public_key(new_secret)
+
+            self.name_recorder_mapper[h['name']] = {
+                'account': account, 'new_secret': new_secret,
+            }
+
+            h['kwargs'] = self.get_kwargs(account, new_secret)
+            h['account'] = {
+                'name': account.name,
+                'username': account.username,
+                'secret_type': account.secret_type,
+                'secret': new_secret,
+                'private_key_path': private_key_path
+            }
+            if asset.platform.type == 'oracle':
+                h['account']['mode'] = 'sysdba' if account.privileged else None
+            inventory_hosts.append(h)
+        return inventory_hosts
+
+    def on_host_success(self, host, result):
+        account_info = self.name_recorder_mapper.get(host)
+        if not account_info:
+            return
+        account = account_info['account']
+        new_secret = account_info['new_secret']
+        if not account:
+            return
+        account.secret = new_secret
+        account.save(update_fields=['secret'])
+
+    def on_host_error(self, host, error, result):
+        pass
+
+    def on_runner_failed(self, runner, e):
+        logger.error("Push account error: ", e)
+
+    def run(self, *args, **kwargs):
+        if not self.check_secret():
+            return
+        super().run(*args, **kwargs)
+
     # @classmethod
     # def trigger_by_asset_create(cls, asset):
     #     automations = PushAccountAutomation.objects.filter(
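The host_callback added above fans one inventory host out into one entry per account. A stripped-down sketch of that idea, with the JumpServer-specific pieces (secret generation, SSH keys, Oracle mode) left out; the dict shapes are illustrative assumptions:

    from copy import deepcopy

    def fan_out_hosts(host, accounts, new_secret='s3cret'):
        inventory_hosts = []
        for account in accounts:
            h = deepcopy(host)                       # each account gets its own host entry
            h['name'] += '_' + account['username']
            h['account'] = {'username': account['username'], 'secret': new_secret}
            inventory_hosts.append(h)
        return inventory_hosts

    print(fan_out_hosts({'name': 'web-1'}, [{'username': 'root'}, {'username': 'deploy'}]))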
@@ -3,6 +3,7 @@
   vars:
     ansible_python_interpreter: /usr/local/bin/python

+
   tasks:
     - name: Verify account
       community.postgresql.postgresql_ping:
@@ -11,3 +12,5 @@
         login_host: "{{ jms_asset.address }}"
         login_port: "{{ jms_asset.port }}"
         db: "{{ jms_asset.spec_info.db_name }}"
+      register: result
+      failed_when: not result.is_available
@@ -2,12 +2,12 @@ from django.db.models import QuerySet

 from accounts.const import AutomationTypes, Connectivity
 from common.utils import get_logger
-from ..base.manager import PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager
+from ..base.manager import VerifyHostCallbackMixin, AccountBasePlaybookManager

 logger = get_logger(__name__)


-class VerifyAccountManager(PushOrVerifyHostCallbackMixin, AccountBasePlaybookManager):
+class VerifyAccountManager(VerifyHostCallbackMixin, AccountBasePlaybookManager):

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -1,7 +1,5 @@
 from rest_framework.decorators import action
 from rest_framework.response import Response

-from common.utils import reverse
 from orgs.mixins.api import OrgBulkModelViewSet
 from .. import models, serializers
@@ -36,4 +34,4 @@ class CommandFilterACLViewSet(OrgBulkModelViewSet):
         }
         ticket = serializer.cmd_filter_acl.create_command_review_ticket(**data)
         info = ticket.get_extra_info_of_review(user=request.user)
-        return info
+        return Response(data=info)
@@ -113,14 +113,14 @@ class UserAssetAccountBaseACL(BaseACL, OrgModelMixin):
         org_id = None
         if user:
             queryset = queryset.filter_user(user.username)
-        if asset:
-            org_id = asset.org_id
-            queryset = queryset.filter_asset(asset.name, asset.address)
         if account:
             org_id = account.org_id
             queryset = queryset.filter_account(account.username)
         if account_username:
             queryset = queryset.filter_account(username=account_username)
+        if asset:
+            org_id = asset.org_id
+            queryset = queryset.filter_asset(asset.name, asset.address)
         if org_id:
             kwargs['org_id'] = org_id
         if kwargs:
@@ -22,7 +22,7 @@ class LoginACLSerializer(BulkModelSerializer):
     reviewers = ObjectRelatedField(
         queryset=User.objects, label=_("Reviewers"), many=True, required=False
     )
-    action = LabeledChoiceField(choices=LoginACL.ActionChoices.choices)
+    action = LabeledChoiceField(choices=LoginACL.ActionChoices.choices, label=_('Action'))
     reviewers_amount = serializers.IntegerField(
         read_only=True, source="reviewers.count", label=_("Reviewers amount")
     )
@@ -25,6 +25,7 @@ class PlaybookCallback(DefaultCallback):
 class BasePlaybookManager:
     bulk_size = 100
     ansible_account_policy = 'privileged_first'
+    ansible_account_prefer = 'root,Administrator'

     def __init__(self, execution):
         self.execution = execution
@@ -123,6 +124,7 @@ class BasePlaybookManager:
     def generate_inventory(self, platformed_assets, inventory_path):
         inventory = JMSInventory(
             assets=platformed_assets,
+            account_prefer=self.ansible_account_prefer,
             account_policy=self.ansible_account_policy,
             host_callback=self.host_callback,
         )
@@ -172,7 +174,7 @@ class BasePlaybookManager:
         pass

     def on_host_error(self, host, error, result):
-        pass
+        print('host error: {} -> {}'.format(host, error))

     def on_runner_success(self, runner, cb):
         summary = cb.summary
@@ -198,8 +200,11 @@ class BasePlaybookManager:
         runners = self.get_runners()
         if len(runners) > 1:
             print("### 分批次执行开始任务, 总共 {}\n".format(len(runners)))
-        else:
+        elif len(runners) == 1:
             print(">>> 开始执行任务\n")
+        else:
+            print("### 没有需要执行的任务\n")
+            return

         self.execution.date_start = timezone.now()
         for i, runner in enumerate(runners, start=1):
@@ -11,3 +11,5 @@
         login_host: "{{ jms_asset.address }}"
         login_port: "{{ jms_asset.port }}"
         login_db: "{{ jms_asset.spec_info.db_name }}"
+      register: result
+      failed_when: not result.is_available
@@ -30,7 +30,7 @@ class DatabaseTypes(BaseType):
                 'ansible_connection': 'local',
             },
             'ping_enabled': True,
-            'gather_facts_enabled': True,
+            'gather_facts_enabled': False,
             'gather_accounts_enabled': True,
             'verify_account_enabled': True,
             'change_secret_enabled': True,
@@ -65,7 +65,7 @@ class AssetAccountSerializer(
     class Meta:
         model = Account
         fields_mini = [
-            'id', 'name', 'username', 'privileged',
+            'id', 'name', 'username', 'privileged', 'is_active',
             'version', 'secret_type',
         ]
         fields_write_only = [
@@ -76,6 +76,12 @@ class AssetAccountSerializer(
             'secret': {'write_only': True},
         }

+    def validate_push_now(self, value):
+        request = self.context['request']
+        if not request.user.has_perms('assets.push_assetaccount'):
+            return False
+        return value
+
     def validate_name(self, value):
         if not value:
             value = self.initial_data.get('username')
@@ -99,8 +99,9 @@ def on_asset_post_delete(instance: Asset, using, **kwargs):
     )


-def resend_to_asset_signals(sender, signal, **kwargs):
-    signal.send(sender=Asset, **kwargs)
+@on_transaction_commit
+def resend_to_asset_signals(sender, signal, instance, **kwargs):
+    signal.send(sender=Asset, instance=instance.asset_ptr, **kwargs)


 for model in (Host, Database, Device, Web, Cloud):
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-from urllib3.exceptions import MaxRetryError
 from urllib.parse import urlencode
+from urllib3.exceptions import MaxRetryError, LocationParseError

 from kubernetes import client
 from kubernetes.client import api_client
@@ -8,7 +8,7 @@ from kubernetes.client.api import core_v1_api
 from kubernetes.client.exceptions import ApiException

 from common.utils import get_logger
+from common.exceptions import JMSException
 from ..const import CloudTypes, Category

 logger = get_logger(__file__)
@@ -58,15 +58,21 @@ class KubernetesClient:
         api = self.get_api()
         try:
             ret = api.list_pod_for_all_namespaces(watch=False, _request_timeout=(3, 3))
+        except LocationParseError as e:
+            logger.warning("Kubernetes API request url error: {}".format(e))
+            raise JMSException(code='k8s_tree_error', detail=e)
         except MaxRetryError:
-            logger.warning('Kubernetes connection timed out')
-            return
+            msg = "Kubernetes API request timeout"
+            logger.warning(msg)
+            raise JMSException(code='k8s_tree_error', detail=msg)
         except ApiException as e:
             if e.status == 401:
-                logger.warning('Kubernetes User not authenticated')
+                msg = "Kubernetes API request unauthorized"
+                logger.warning(msg)
             else:
-                logger.warning(e)
-            return
+                msg = e
+                logger.warning(msg)
+            raise JMSException(code='k8s_tree_error', detail=msg)
         data = {}
         for i in ret.items:
             namespace = i.metadata.namespace
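A rough sketch (not the project's code) of the error-propagation change above: instead of logging and returning None, the client raises a structured exception that the API view layer can turn into an HTTP error. The JMSException fields here are assumed from the hunk, not a verified API:

    class JMSException(Exception):
        def __init__(self, code, detail):
            super().__init__(detail)
            self.code, self.detail = code, detail

    def list_pods(call):
        try:
            return call()
        except TimeoutError:
            msg = 'Kubernetes API request timeout'
            raise JMSException(code='k8s_tree_error', detail=msg)

    def broken_call():
        raise TimeoutError

    try:
        list_pods(broken_call)
    except JMSException as e:
        print(e.code, e.detail)   # k8s_tree_error Kubernetes API request timeout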
@@ -3,6 +3,7 @@
 from importlib import import_module

 from django.conf import settings
+from django.db.models import F, Value, CharField
 from rest_framework import generics
 from rest_framework.permissions import IsAuthenticated
 from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin
@@ -14,11 +15,12 @@ from common.plugins.es import QuerySet as ESQuerySet
 from orgs.utils import current_org, tmp_to_root_org
 from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet
 from .backends import TYPE_ENGINE_MAPPING
-from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog
+from .const import ActivityChoices
+from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog, ActivityLog
 from .serializers import FTPLogSerializer, UserLoginLogSerializer, JobAuditLogSerializer
 from .serializers import (
     OperateLogSerializer, OperateLogActionDetailSerializer,
-    PasswordChangeLogSerializer, ActivitiesOperatorLogSerializer,
+    PasswordChangeLogSerializer, ActivityOperatorLogSerializer,
 )
@@ -47,8 +49,8 @@ class UserLoginCommonMixin:
     date_range_filter_fields = [
         ('datetime', ('date_from', 'date_to'))
     ]
-    filterset_fields = ['username', 'ip', 'city', 'type', 'status', 'mfa']
-    search_fields = ['username', 'ip', 'city']
+    filterset_fields = ['id', 'username', 'ip', 'city', 'type', 'status', 'mfa']
+    search_fields = ['id', 'username', 'ip', 'city']


 class UserLoginLogViewSet(UserLoginCommonMixin, ListModelMixin, JMSGenericViewSet):
@@ -77,17 +79,42 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView):


 class ResourceActivityAPIView(generics.ListAPIView):
-    serializer_class = ActivitiesOperatorLogSerializer
+    serializer_class = ActivityOperatorLogSerializer
     rbac_perms = {
-        'GET': 'audits.view_operatelog',
+        'GET': 'audits.view_activitylog',
     }

-    def get_queryset(self):
-        resource_id = self.request.query_params.get('resource_id')
-        with tmp_to_root_org():
-            queryset = OperateLog.objects.filter(resource_id=resource_id)[:30]
+    @staticmethod
+    def get_operate_log_qs(fields, limit=30, **filters):
+        queryset = OperateLog.objects.filter(**filters).annotate(
+            r_type=Value(ActivityChoices.operate_log, CharField()),
+            r_detail_id=F('id'), r_detail=Value(None, CharField()),
+            r_user=F('user'), r_action=F('action'),
+        ).values(*fields)[:limit]
         return queryset

+    @staticmethod
+    def get_activity_log_qs(fields, limit=30, **filters):
+        queryset = ActivityLog.objects.filter(**filters).annotate(
+            r_type=F('type'), r_detail_id=F('detail_id'),
+            r_detail=F('detail'), r_user=Value(None, CharField()),
+            r_action=Value(None, CharField()),
+        ).values(*fields)[:limit]
+        return queryset
+
+    def get_queryset(self):
+        limit = 30
+        resource_id = self.request.query_params.get('resource_id')
+        fields = (
+            'id', 'datetime', 'r_detail', 'r_detail_id',
+            'r_user', 'r_action', 'r_type'
+        )
+        with tmp_to_root_org():
+            qs1 = self.get_operate_log_qs(fields, resource_id=resource_id)
+            qs2 = self.get_activity_log_qs(fields, resource_id=resource_id)
+            queryset = qs2.union(qs1)
+        return queryset[:limit]
+

 class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet):
     model = OperateLog
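The new get_queryset() above unions two differently-shaped log tables after annotating both onto the same r_* field names. A plain-Python analogue of that normalisation step (field names follow the hunk; the sample records are illustrative):

    def normalize_operate_log(rec):
        return {'id': rec['id'], 'datetime': rec['datetime'], 'r_type': 'O',
                'r_detail': None, 'r_detail_id': rec['id'],
                'r_user': rec['user'], 'r_action': rec['action']}

    def normalize_activity_log(rec):
        return {'id': rec['id'], 'datetime': rec['datetime'], 'r_type': rec['type'],
                'r_detail': rec['detail'], 'r_detail_id': rec['detail_id'],
                'r_user': None, 'r_action': None}

    operate = [{'id': 1, 'datetime': '2023-02-07 10:00', 'user': 'admin', 'action': 'update'}]
    activity = [{'id': 2, 'datetime': '2023-02-07 11:00', 'type': 'T',
                 'detail': 'Task finished', 'detail_id': 'abc'}]
    feed = [normalize_operate_log(r) for r in operate] + [normalize_activity_log(r) for r in activity]
    feed.sort(key=lambda r: r['datetime'], reverse=True)
    print(feed[:30])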
@@ -129,10 +156,12 @@ class PasswordChangeLogViewSet(ListModelMixin, JMSGenericViewSet):
     ordering = ['-datetime']

     def get_queryset(self):
-        users = current_org.get_members()
-        queryset = super().get_queryset().filter(
-            user__in=[user.__str__() for user in users]
-        )
+        queryset = super().get_queryset()
+        if not current_org.is_root():
+            users = current_org.get_members()
+            queryset = queryset.filter(
+                user__in=[str(user) for user in users]
+            )
         return queryset

     # Todo: figure out how to handle this
@@ -69,6 +69,11 @@ class OperateLogStore(object):
             before.update(op_before)
             after.update(op_after)
         else:
+            # Cap at 128 (OperateLog.resource.field.max_length) to avoid save failures
+            max_length = 128
+            resource = kwargs.get('resource', '')
+            if resource and isinstance(resource, str):
+                kwargs['resource'] = resource[:max_length]
             op_log = self.model(**kwargs)

         diff = self.convert_before_after_to_diff(before, after)
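A quick check of the truncation guard added above; the helper name is made up for illustration, only the slicing behaviour mirrors the hunk:

    def clamp_resource(kwargs, max_length=128):
        resource = kwargs.get('resource', '')
        if resource and isinstance(resource, str):
            kwargs['resource'] = resource[:max_length]
        return kwargs

    print(len(clamp_resource({'resource': 'x' * 300})['resource']))   # 128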
@@ -35,6 +35,13 @@ class LoginTypeChoices(TextChoices):
     unknown = "U", _("Unknown")


+class ActivityChoices(TextChoices):
+    operate_log = 'O', _('Operate log')
+    session_log = 'S', _('Session log')
+    login_log = 'L', _('Login log')
+    task = 'T', _('Task')
+
+
 class MFAChoices(IntegerChoices):
     disabled = 0, _("Disabled")
     enabled = 1, _("Enabled")
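A dependency-free stand-in for the ActivityChoices enum added above (Django's TextChoices also carries a human-readable label, which this sketch omits):

    from enum import Enum

    class ActivityChoices(str, Enum):
        operate_log = 'O'
        session_log = 'S'
        login_log = 'L'
        task = 'T'

    print(ActivityChoices.task.value)   # 'T'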
@@ -130,58 +130,6 @@ class OperatorLogHandler(metaclass=Singleton):
         after = self.__data_processing(after)
         return before, after

-    @staticmethod
-    def _get_Session_params(resource, **kwargs):
-        # Session update logs are not reflected in Activity,
-        # otherwise session-finish and recording-finish updates would all show up as records
-        params = {}
-        action = kwargs.get('data', {}).get('action', 'create')
-        detail = _(
-            '{} used account[{}], login method[{}] login the asset.'
-        ).format(
-            resource.user, resource.account, resource.login_from_display
-        )
-        if action == ActionChoices.create:
-            params = {
-                'action': ActionChoices.connect,
-                'resource_id': str(resource.asset_id),
-                'user': resource.user, 'detail': detail
-            }
-        return params
-
-    @staticmethod
-    def _get_ChangeSecretRecord_params(resource, **kwargs):
-        detail = _(
-            'User {} has executed change auth plan for this account.({})'
-        ).format(
-            resource.created_by, _(resource.status.title())
-        )
-        return {
-            'action': ActionChoices.change_auth, 'detail': detail,
-            'resource_id': str(resource.account_id),
-        }
-
-    @staticmethod
-    def _get_UserLoginLog_params(resource, **kwargs):
-        username = resource.username
-        login_status = _('Success') if resource.status else _('Failed')
-        detail = _('User {} login into this service.[{}]').format(
-            resource.username, login_status
-        )
-        user_id = User.objects.filter(username=username).\
-            values_list('id', flat=True)[0]
-        return {
-            'action': ActionChoices.login, 'detail': detail,
-            'resource_id': str(user_id),
-        }
-
-    def _activity_handle(self, data, object_name, resource):
-        param_func = getattr(self, '_get_%s_params' % object_name, None)
-        if param_func is not None:
-            params = param_func(resource, data=data)
-            data.update(params)
-        return data
-
     def create_or_update_operate_log(
         self, action, resource_type, resource=None, resource_display=None,
         force=False, log_id=None, before=None, after=None,
@@ -207,7 +155,6 @@ class OperatorLogHandler(metaclass=Singleton):
             'remote_addr': remote_addr, 'before': before, 'after': after,
             'org_id': get_current_org_id(),
         }
-        data = self._activity_handle(data, object_name, resource=resource)
         with transaction.atomic():
             if self.log_client.ping(timeout=1):
                 client = self.log_client
@@ -0,0 +1,30 @@
+# Generated by Django 3.2.16 on 2023-02-07 00:57
+
+from django.db import migrations, models
+import uuid
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('audits', '0020_auto_20230117_1004'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ActivityLog',
+            fields=[
+                ('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
+                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
+                ('type', models.CharField(choices=[('O', 'Operate log'), ('S', 'Session log'), ('L', 'Login log'), ('T', 'Task')], default=None, max_length=2, null=True, verbose_name='Activity type')),
+                ('resource_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Resource')),
+                ('datetime', models.DateTimeField(auto_now=True, db_index=True, verbose_name='Datetime')),
+                ('detail', models.TextField(blank=True, default='', verbose_name='Detail')),
+                ('detail_id', models.CharField(default=None, max_length=36, null=True, verbose_name='Detail ID')),
+            ],
+            options={
+                'verbose_name': 'Activity log',
+                'ordering': ('-datetime',),
+            },
+        ),
+    ]
@@ -12,6 +12,7 @@ from orgs.utils import current_org
 from .const import (
     OperateChoices,
     ActionChoices,
+    ActivityChoices,
     LoginTypeChoices,
     MFAChoices,
     LoginStatusChoices,
@@ -20,6 +21,7 @@ from .const import (
 __all__ = [
     "FTPLog",
     "OperateLog",
+    "ActivityLog",
     "PasswordChangeLog",
     "UserLoginLog",
 ]
@@ -59,7 +61,6 @@ class OperateLog(OrgModelMixin):
     remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
     datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
     diff = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
-    detail = models.CharField(max_length=128, null=True, blank=True, verbose_name=_('Detail'))

     def __str__(self):
         return "<{}> {} <{}>".format(self.user, self.action, self.resource)
@@ -93,6 +94,34 @@ class OperateLog(OrgModelMixin):
         ordering = ('-datetime',)


+class ActivityLog(OrgModelMixin):
+    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
+    type = models.CharField(
+        choices=ActivityChoices.choices, max_length=2,
+        null=True, default=None, verbose_name=_("Activity type"),
+    )
+    resource_id = models.CharField(
+        max_length=36, blank=True, default='',
+        db_index=True, verbose_name=_("Resource")
+    )
+    datetime = models.DateTimeField(
+        auto_now=True, verbose_name=_('Datetime'), db_index=True
+    )
+    detail = models.TextField(default='', blank=True, verbose_name=_('Detail'))
+    detail_id = models.CharField(
+        max_length=36, default=None, null=True, verbose_name=_('Detail ID')
+    )
+
+    class Meta:
+        verbose_name = _("Activity log")
+        ordering = ('-datetime',)
+
+    def save(self, *args, **kwargs):
+        if current_org.is_root() and not self.org_id:
+            self.org_id = Organization.ROOT_ID
+        return super(ActivityLog, self).save(*args, **kwargs)
+
+
 class PasswordChangeLog(models.Model):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
     user = models.CharField(max_length=128, verbose_name=_("User"))
@@ -5,6 +5,7 @@ from rest_framework import serializers

 from audits.backends.db import OperateLogStore
 from common.serializers.fields import LabeledChoiceField
+from common.utils import reverse
 from common.utils.timezone import as_current_tz
 from ops.models.job import JobAuditLog
 from ops.serializers.job import JobExecutionSerializer
@@ -13,7 +14,7 @@ from . import models
 from .const import (
     ActionChoices, OperateChoices,
     MFAChoices, LoginStatusChoices,
-    LoginTypeChoices,
+    LoginTypeChoices, ActivityChoices,
 )
@@ -105,19 +106,44 @@ class SessionAuditSerializer(serializers.ModelSerializer):
         fields = "__all__"


-class ActivitiesOperatorLogSerializer(serializers.Serializer):
+class ActivityOperatorLogSerializer(serializers.Serializer):
     timestamp = serializers.SerializerMethodField()
+    detail_url = serializers.SerializerMethodField()
     content = serializers.SerializerMethodField()

     @staticmethod
     def get_timestamp(obj):
-        return as_current_tz(obj.datetime).strftime('%Y-%m-%d %H:%M:%S')
+        return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S')

     @staticmethod
     def get_content(obj):
-        action = obj.action.replace('_', ' ').capitalize()
-        if not obj.detail:
-            ctn = _('User {} {} this resource.').format(obj.user, _(action))
+        if not obj['r_detail']:
+            action = obj['r_action'].replace('_', ' ').capitalize()
+            ctn = _('User {} {} this resource.').format(obj['r_user'], _(action))
         else:
-            ctn = obj.detail
+            ctn = obj['r_detail']
         return ctn
+
+    @staticmethod
+    def get_detail_url(obj):
+        detail_url = ''
+        detail_id, obj_type = obj['r_detail_id'], obj['r_type']
+        if not detail_id:
+            return detail_url
+
+        if obj_type == ActivityChoices.operate_log:
+            detail_url = reverse(
+                view_name='audits:operate-log-detail',
+                kwargs={'pk': obj['id']},
+                api_to_ui=True, is_audit=True
+            )
+        elif obj_type == ActivityChoices.task:
+            detail_url = reverse(
+                'ops:celery-task-log', kwargs={'pk': detail_id}
+            )
+        elif obj_type == ActivityChoices.login_log:
+            detail_url = '%s?id=%s' % (
+                reverse('api-audits:login-log-list', api_to_ui=True, is_audit=True),
+                detail_id
+            )
+        return detail_url
@@ -1,328 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-import uuid
-
-from celery import shared_task
-from django.apps import apps
-from django.conf import settings
-from django.contrib.auth import BACKEND_SESSION_KEY
-from django.db import transaction
-from django.db.models.signals import pre_delete, pre_save, m2m_changed, post_save
-from django.dispatch import receiver
-from django.utils import timezone, translation
-from django.utils.functional import LazyObject
-from django.utils.translation import ugettext_lazy as _
-from rest_framework.renderers import JSONRenderer
-from rest_framework.request import Request
-
-from audits.handler import (
-    get_instance_current_with_cache_diff, cache_instance_before_data,
-    create_or_update_operate_log, get_instance_dict_from_cache
-)
-from audits.utils import model_to_dict_for_operate_log as model_to_dict
-from authentication.signals import post_auth_failed, post_auth_success
-from authentication.utils import check_different_city_login_if_need
-from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL
-from common.signals import django_ready
-from common.utils import get_request_ip, get_logger, get_syslogger
-from common.utils.encode import data_to_json
-from jumpserver.utils import current_request
-from orgs.utils import org_aware_func
-from terminal.models import Session, Command
-from terminal.serializers import SessionSerializer, SessionCommandSerializer
-from users.models import User
-from users.signals import post_user_change_password
-from . import models, serializers
-from .const import MODELS_NEED_RECORD, ActionChoices
-from .utils import write_login_log
-
-logger = get_logger(__name__)
-sys_logger = get_syslogger(__name__)
-json_render = JSONRenderer()
-
-
-class AuthBackendLabelMapping(LazyObject):
-    @staticmethod
-    def get_login_backends():
-        backend_label_mapping = {}
-        for source, backends in User.SOURCE_BACKEND_MAPPING.items():
-            for backend in backends:
-                backend_label_mapping[backend] = source.label
-        backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _("SSH Key")
-        backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _("Password")
-        backend_label_mapping[settings.AUTH_BACKEND_SSO] = _("SSO")
-        backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _("Auth Token")
-        backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _("WeCom")
-        backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _("FeiShu")
-        backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _("DingTalk")
-        backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _("Temporary token")
-        return backend_label_mapping
-
-    def _setup(self):
-        self._wrapped = self.get_login_backends()
-
-
-AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()
-
-M2M_ACTION = {
-    POST_ADD: ActionChoices.create,
-    POST_REMOVE: ActionChoices.delete,
-    POST_CLEAR: ActionChoices.delete,
-}
-
-
-@shared_task(verbose_name=_("Create m2m operate log"))
-@org_aware_func('instance')
-def create_m2m_operate_log(instance, action, model, pk_set):
-    current_instance = model_to_dict(instance, include_model_fields=False)
-    resource_type = instance._meta.verbose_name
-    field_name = str(model._meta.verbose_name)
-    action = M2M_ACTION[action]
-    instance_id = current_instance.get('id')
-    log_id, before_instance = get_instance_dict_from_cache(instance_id)
-
-    objs = model.objects.filter(pk__in=pk_set)
-    objs_display = [str(o) for o in objs]
-    changed_field = current_instance.get(field_name, [])
-
-    after, before, before_value = None, None, None
-    if action == ActionChoices.create:
-        before_value = list(set(changed_field) - set(objs_display))
-    elif action == ActionChoices.delete:
-        before_value = list(
-            set(changed_field).symmetric_difference(set(objs_display))
-        )
-
-    if changed_field:
-        after = {field_name: changed_field}
-    if before_value:
-        before = {field_name: before_value}
-
-    if sorted(str(before)) == sorted(str(after)):
-        return
-
-    create_or_update_operate_log(
-        ActionChoices.update, resource_type,
-        resource=instance, log_id=log_id,
-        before=before, after=after
-    )
-
-
-@receiver(m2m_changed)
-def on_m2m_changed(sender, action, instance, model, pk_set, **kwargs):
-    if action not in M2M_ACTION:
-        return
-    if not instance:
-        return
-    create_m2m_operate_log.delay(instance, action, model, pk_set)
-
-
-def signal_of_operate_log_whether_continue(sender, instance, created, update_fields=None):
-    condition = True
-    if not instance:
-        condition = False
-    if instance and getattr(instance, SKIP_SIGNAL, False):
-        condition = False
-    # Terminal create events are produced by the system, so they are not recorded
-    if instance._meta.object_name == 'Terminal' and created:
-        condition = False
-    # last_login is the latest login time and changes on every login
-    if instance._meta.object_name == 'User' and update_fields and 'last_login' in update_fields:
-        condition = False
-    # Skip models that are not in the record whitelist
-    if sender._meta.object_name not in MODELS_NEED_RECORD:
-        condition = False
-    return condition
-
-
-@shared_task(verbose_name=_("Create operate log"))
-@org_aware_func('instance')
-def create_operate_log(instance, created, update_fields=None):
-    pass
-
-
-@receiver(pre_save)
-def on_object_pre_create_or_update(sender, instance=None, update_fields=None, **kwargs):
-    ok = signal_of_operate_log_whether_continue(
-        sender, instance, False, update_fields
-    )
-    if not ok:
-        return
-
-    instance_id = getattr(instance, 'pk', None)
-    instance_before_data = {'id': instance_id}
-    raw_instance = type(instance).objects.filter(pk=instance_id).first()
-
-    if raw_instance:
-        instance_before_data = model_to_dict(raw_instance)
-    operate_log_id = str(uuid.uuid4())
-    instance_before_data['operate_log_id'] = operate_log_id
-    setattr(instance, 'operate_log_id', operate_log_id)
-    cache_instance_before_data(instance_before_data)
-
-
-@receiver(post_save)
-def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
-    ok = signal_of_operate_log_whether_continue(
-        sender, instance, created, update_fields
-    )
-    if not ok:
-        return
-
-    log_id, before, after = None, None, None
-    if created:
-        action = models.ActionChoices.create
-        after = model_to_dict(instance)
-        log_id = getattr(instance, 'operate_log_id', None)
-    else:
-        action = ActionChoices.update
-        current_instance = model_to_dict(instance)
-        log_id, before, after = get_instance_current_with_cache_diff(current_instance)
-
-    resource_type = sender._meta.verbose_name
-    object_name = sender._meta.object_name
-    create_or_update_operate_log(
-        action, resource_type, resource=instance, log_id=log_id,
-        before=before, after=after, object_name=object_name
-    )
-
-
-@receiver(pre_delete)
-def on_object_delete(sender, instance=None, **kwargs):
-    ok = signal_of_operate_log_whether_continue(sender, instance, False)
-    if not ok:
-        return
-
-    resource_type = sender._meta.verbose_name
-    create_or_update_operate_log(
-        ActionChoices.delete, resource_type,
-        resource=instance, before=model_to_dict(instance)
-    )
-
-
-@receiver(post_user_change_password, sender=User)
-def on_user_change_password(sender, user=None, **kwargs):
-    if not current_request:
-        remote_addr = '127.0.0.1'
-        change_by = 'System'
-    else:
-        remote_addr = get_request_ip(current_request)
-        if not current_request.user.is_authenticated:
-            change_by = str(user)
-        else:
-            change_by = str(current_request.user)
-    with transaction.atomic():
-        models.PasswordChangeLog.objects.create(
-            user=str(user), change_by=change_by,
-            remote_addr=remote_addr,
-        )
-
-
-def on_audits_log_create(sender, instance=None, **kwargs):
-    if sender == models.UserLoginLog:
-        category = "login_log"
-        serializer_cls = serializers.UserLoginLogSerializer
-    elif sender == models.FTPLog:
-        category = "ftp_log"
-        serializer_cls = serializers.FTPLogSerializer
-    elif sender == models.OperateLog:
-        category = "operation_log"
-        serializer_cls = serializers.OperateLogSerializer
-    elif sender == models.PasswordChangeLog:
-        category = "password_change_log"
-        serializer_cls = serializers.PasswordChangeLogSerializer
-    elif sender == Session:
-        category = "host_session_log"
-        serializer_cls = SessionSerializer
-    elif sender == Command:
-        category = "session_command_log"
-        serializer_cls = SessionCommandSerializer
-    else:
-        return
-
-    serializer = serializer_cls(instance)
-    data = data_to_json(serializer.data, indent=None)
-    msg = "{} - {}".format(category, data)
-    sys_logger.info(msg)
-
-
-def get_login_backend(request):
-    backend = request.session.get('auth_backend', '') or \
-              request.session.get(BACKEND_SESSION_KEY, '')
-
-    backend_label = AUTH_BACKEND_LABEL_MAPPING.get(backend, None)
-    if backend_label is None:
-        backend_label = ''
-    return backend_label
-
-
-def generate_data(username, request, login_type=None):
-    user_agent = request.META.get('HTTP_USER_AGENT', '')
-    login_ip = get_request_ip(request) or '0.0.0.0'
-
-    if login_type is None and isinstance(request, Request):
-        login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', 'U')
-    if login_type is None:
-        login_type = 'W'
-
-    with translation.override('en'):
-        backend = str(get_login_backend(request))
-
-    data = {
-        'username': username,
-        'ip': login_ip,
-        'type': login_type,
-        'user_agent': user_agent[0:254],
-        'datetime': timezone.now(),
-        'backend': backend,
-    }
-    return data
-
-
-@receiver(post_auth_success)
-def on_user_auth_success(sender, user, request, login_type=None, **kwargs):
-    logger.debug('User login success: {}'.format(user.username))
-    check_different_city_login_if_need(user, request)
-    data = generate_data(user.username, request, login_type=login_type)
-    request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S")
-    data.update({'mfa': int(user.mfa_enabled), 'status': True})
-    write_login_log(**data)
-
-
-@receiver(post_auth_failed)
-def on_user_auth_failed(sender, username, request, reason='', **kwargs):
-    logger.debug('User login failed: {}'.format(username))
-    data = generate_data(username, request)
-    data.update({'reason': reason[:128], 'status': False})
-    write_login_log(**data)
-
-
-@receiver(django_ready)
-def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
-    exclude_apps = {
-        'django_cas_ng', 'captcha', 'admin', 'jms_oidc_rp',
-        'django_celery_beat', 'contenttypes', 'sessions', 'auth'
-    }
-    exclude_models = {
-        'UserPasswordHistory', 'ContentType',
-        'MessageContent', 'SiteMessage',
-        'PlatformAutomation', 'PlatformProtocol', 'Protocol',
-        'HistoricalAccount', 'GatheredUser', 'ApprovalRule',
-        'BaseAutomation', 'CeleryTask', 'Command', 'JobAuditLog',
-        'ConnectionToken', 'SessionJoinRecord',
-        'HistoricalJob', 'Status', 'TicketStep', 'Ticket',
-        'UserAssetGrantedTreeNodeRelation', 'TicketAssignee',
-        'SuperTicket', 'SuperConnectionToken', 'PermNode',
-        'PermedAsset', 'PermedAccount', 'MenuPermission',
-        'Permission', 'TicketSession', 'ApplyLoginTicket',
-        'ApplyCommandTicket', 'ApplyLoginAssetTicket',
-        'FTPLog', 'OperateLog', 'PasswordChangeLog'
-    }
-    for i, app in enumerate(apps.get_models(), 1):
-        app_name = app._meta.app_label
-        model_name = app._meta.object_name
-        if app_name in exclude_apps or \
-                model_name in exclude_models or \
-                model_name.endswith('Execution'):
-            continue
-        MODELS_NEED_RECORD.add(model_name)
@@ -0,0 +1,4 @@
+from .activity_log import *
+from .login_log import *
+from .operate_log import *
+from .other import *
@@ -0,0 +1,190 @@
# -*- coding: utf-8 -*-
#
from celery import signals
from django.db.models.signals import post_save
from django.utils.translation import ugettext_lazy as _

from audits.models import ActivityLog
from assets.models import Asset, Node
from accounts.const import AutomationTypes
from accounts.models import AccountBackupAutomation
from common.utils import get_object_or_none
from ops.celery import app
from orgs.utils import tmp_to_root_org
from terminal.models import Session
from users.models import User
from jumpserver.utils import current_request

from ..const import ActivityChoices


class ActivityLogHandler(object):

    @staticmethod
    def _func_accounts_execute_automation(*args, **kwargs):
        asset_ids = []
        pid, tp = kwargs.get('pid'), kwargs.get('tp')
        model = AutomationTypes.get_type_model(tp)
        task_type_label = tp.label
        with tmp_to_root_org():
            instance = get_object_or_none(model, pk=pid)
            if instance is not None:
                asset_ids = instance.get_all_assets().values_list('id', flat=True)
        return task_type_label, asset_ids

    @staticmethod
    def _func_accounts_push_accounts_to_assets(*args, **kwargs):
        return '', args[0][1]

    @staticmethod
    def _func_accounts_execute_account_backup_plan(*args, **kwargs):
        asset_ids, pid = [], kwargs.get('pid')
        with tmp_to_root_org():
            instance = get_object_or_none(AccountBackupAutomation, pk=pid)
            if instance is not None:
                asset_ids = Asset.objects.filter(
                    platform__type__in=instance.types
                ).values_list('id', flat=True)
        return '', asset_ids

    @staticmethod
    def _func_assets_verify_accounts_connectivity(*args, **kwargs):
        return '', args[0][1]

    @staticmethod
    def _func_accounts_verify_accounts_connectivity(*args, **kwargs):
        return '', args[0][1]

    @staticmethod
    def _func_assets_test_assets_connectivity_manual(*args, **kwargs):
        return '', args[0][0]

    @staticmethod
    def _func_assets_test_node_assets_connectivity_manual(*args, **kwargs):
        asset_ids = []
        node = get_object_or_none(Node, pk=args[0][0])
        if node is not None:
            asset_ids = node.get_all_assets().values_list('id', flat=True)
        return '', asset_ids

    @staticmethod
    def _func_assets_update_assets_hardware_info_manual(*args, **kwargs):
        return '', args[0][0]

    @staticmethod
    def _func_assets_update_node_assets_hardware_info_manual(*args, **kwargs):
        asset_ids = []
        node = get_object_or_none(Node, pk=args[0][0])
        if node is not None:
            asset_ids = node.get_all_assets().values_list('id', flat=True)
        return '', asset_ids

    def get_celery_task_info(self, task_name, *args, **kwargs):
        task_display, resource_ids = self.get_info_by_task_name(
            task_name, *args, **kwargs
        )
        return task_display, resource_ids

    @staticmethod
    def get_task_display(task_name, **kwargs):
        task = app.tasks.get(task_name)
        return getattr(task, 'verbose_name', _('Unknown'))

    def get_info_by_task_name(self, task_name, *args, **kwargs):
        resource_ids = []
        task_name_list = str(task_name).split('.')
        if len(task_name_list) < 2:
            return '', resource_ids

        task_display = self.get_task_display(task_name)
        model, name = task_name_list[0], task_name_list[-1]
        func_name = '_func_%s_%s' % (model, name)
        handle_func = getattr(self, func_name, None)
        if handle_func is not None:
            task_type, resource_ids = handle_func(*args, **kwargs)
            if task_type:
                task_display = '%s-%s' % (task_display, task_type)
        return task_display, resource_ids

    @staticmethod
    def session_for_activity(obj):
        detail = _(
            '{} used account[{}], login method[{}] login the asset.'
        ).format(
            obj.user, obj.account, obj.login_from_display
        )
        return obj.asset_id, detail, ActivityChoices.session_log

    @staticmethod
    def login_log_for_activity(obj):
        login_status = _('Success') if obj.status else _('Failed')
        detail = _('User {} login into this service.[{}]').format(
            obj.username, login_status
        )
        user_id = User.objects.filter(username=obj.username).values('id').first()
        return user_id['id'], detail, ActivityChoices.login_log


activity_handler = ActivityLogHandler()


@signals.before_task_publish.connect
def before_task_publish_for_activity_log(headers=None, **kwargs):
    task_id, task_name = headers.get('id'), headers.get('task')
    args, kwargs = kwargs['body'][:2]
    task_display, resource_ids = activity_handler.get_celery_task_info(
        task_name, args, **kwargs
    )
    activities = []
    detail = _('User %s performs a task(%s) for this resource.') % (
        getattr(current_request, 'user', None), task_display
    )
    for resource_id in resource_ids:
        activities.append(
            ActivityLog(
                resource_id=resource_id, type=ActivityChoices.task, detail=detail
            )
        )
    ActivityLog.objects.bulk_create(activities)

    activity_info = {
        'activity_ids': [a.id for a in activities]
    }
    kwargs['activity_info'] = activity_info


@signals.task_prerun.connect
def on_celery_task_pre_run_for_activity_log(task_id='', **kwargs):
    activity_info = kwargs['kwargs'].pop('activity_info', None)
    if activity_info is None:
        return

    activities = []
    for activity_id in activity_info['activity_ids']:
        activities.append(
            ActivityLog(id=activity_id, detail_id=task_id)
        )
    ActivityLog.objects.bulk_update(activities, ('detail_id', ))


@post_save.connect
def on_object_created(
        sender, instance=None, created=False, update_fields=None, **kwargs
):
    handler_mapping = {
        'Session': activity_handler.session_for_activity,
        'UserLoginLog': activity_handler.login_log_for_activity
    }
    model_name = sender._meta.object_name
    if not created or model_name not in handler_mapping:
        return

    resource_id, detail, a_type = handler_mapping[model_name](instance)

    ActivityLog.objects.create(
        resource_id=resource_id, type=a_type,
        detail=detail, detail_id=instance.id
    )
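The handler dispatch above is purely name-based: the first and last dotted segments of the Celery task name are glued into a `_func_<app>_<task>` lookup. A minimal standalone sketch of that convention (the class, task name, and payload below are made up for illustration and are not part of this commit):

class _DemoHandler:
    @staticmethod
    def _func_accounts_push_accounts_to_assets(*args, **kwargs):
        # args[0] is the (account_ids, asset_ids) tuple the task was published with
        return 'push', args[0][1]

    def resolve(self, task_name, *args, **kwargs):
        parts = str(task_name).split('.')
        app_label, name = parts[0], parts[-1]
        func = getattr(self, '_func_%s_%s' % (app_label, name), None)
        return func(*args, **kwargs) if func else ('', [])


# 'accounts.tasks.push_accounts_to_assets' resolves to _func_accounts_push_accounts_to_assets
print(_DemoHandler().resolve(
    'accounts.tasks.push_accounts_to_assets', (['account-id'], ['asset-id'])
))  # -> ('push', ['asset-id'])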
@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
#
from django.utils.functional import LazyObject
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.contrib.auth import BACKEND_SESSION_KEY
from django.dispatch import receiver
from django.utils import timezone, translation
from rest_framework.request import Request

from authentication.signals import post_auth_failed, post_auth_success
from authentication.utils import check_different_city_login_if_need
from common.utils import get_request_ip, get_logger
from users.models import User

from ..utils import write_login_log


logger = get_logger(__name__)


class AuthBackendLabelMapping(LazyObject):
    @staticmethod
    def get_login_backends():
        backend_label_mapping = {}
        for source, backends in User.SOURCE_BACKEND_MAPPING.items():
            for backend in backends:
                backend_label_mapping[backend] = source.label
        backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _("SSH Key")
        backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _("Password")
        backend_label_mapping[settings.AUTH_BACKEND_SSO] = _("SSO")
        backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _("Auth Token")
        backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _("WeCom")
        backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _("FeiShu")
        backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _("DingTalk")
        backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _("Temporary token")
        return backend_label_mapping

    def _setup(self):
        self._wrapped = self.get_login_backends()


AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()


def get_login_backend(request):
    backend = request.session.get('auth_backend', '') or \
              request.session.get(BACKEND_SESSION_KEY, '')

    backend_label = AUTH_BACKEND_LABEL_MAPPING.get(backend, None)
    if backend_label is None:
        backend_label = ''
    return backend_label


def generate_data(username, request, login_type=None):
    user_agent = request.META.get('HTTP_USER_AGENT', '')
    login_ip = get_request_ip(request) or '0.0.0.0'

    if login_type is None and isinstance(request, Request):
        login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', 'U')
    if login_type is None:
        login_type = 'W'

    with translation.override('en'):
        backend = str(get_login_backend(request))

    data = {
        'username': username,
        'ip': login_ip,
        'type': login_type,
        'user_agent': user_agent[0:254],
        'datetime': timezone.now(),
        'backend': backend,
    }
    return data


@receiver(post_auth_success)
def on_user_auth_success(sender, user, request, login_type=None, **kwargs):
    logger.debug('User login success: {}'.format(user.username))
    check_different_city_login_if_need(user, request)
    data = generate_data(
        user.username, request, login_type=login_type
    )
    request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S")
    data.update({'mfa': int(user.mfa_enabled), 'status': True})
    write_login_log(**data)


@receiver(post_auth_failed)
def on_user_auth_failed(sender, username, request, reason='', **kwargs):
    logger.debug('User login failed: {}'.format(username))
    data = generate_data(username, request)
    data.update({'reason': reason[:128], 'status': False})
    write_login_log(**data)
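AUTH_BACKEND_LABEL_MAPPING above is a Django LazyObject, so the backend/label table is only built on first access, after settings and the User model are fully loaded. A standalone sketch of that pattern with a made-up mapping (not the project's real backends):

from django.utils.functional import LazyObject


class _LazyMapping(LazyObject):
    @staticmethod
    def build():
        # Pretend this needs settings or the database; it runs once, on first use.
        return {'django.contrib.auth.backends.ModelBackend': 'Password'}

    def _setup(self):
        self._wrapped = self.build()


MAPPING = _LazyMapping()
# Nothing has been built yet; the first .get() triggers _setup() and every
# later call is delegated straight to the wrapped dict.
print(MAPPING.get('django.contrib.auth.backends.ModelBackend', ''))  # -> 'Password'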
@@ -0,0 +1,180 @@
# -*- coding: utf-8 -*-
#
import uuid

from django.apps import apps
from django.dispatch import receiver
from django.db.models.signals import post_save, pre_save, m2m_changed, pre_delete

from audits.handler import (
    get_instance_current_with_cache_diff, cache_instance_before_data,
    create_or_update_operate_log, get_instance_dict_from_cache
)
from audits.utils import model_to_dict_for_operate_log as model_to_dict
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL
from common.signals import django_ready

from ..const import MODELS_NEED_RECORD, ActionChoices


M2M_ACTION = {
    POST_ADD: ActionChoices.create,
    POST_REMOVE: ActionChoices.delete,
    POST_CLEAR: ActionChoices.delete,
}


@receiver(m2m_changed)
def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
    if action not in M2M_ACTION:
        return
    if not instance:
        return

    resource_type = instance._meta.verbose_name
    current_instance = model_to_dict(instance, include_model_fields=False)

    instance_id = current_instance.get('id')
    log_id, before_instance = get_instance_dict_from_cache(instance_id)

    field_name = str(model._meta.verbose_name)
    objs = model.objects.filter(pk__in=pk_set)
    objs_display = [str(o) for o in objs]
    action = M2M_ACTION[action]
    changed_field = current_instance.get(field_name, [])

    after, before, before_value = None, None, None
    if action == ActionChoices.create:
        before_value = list(set(changed_field) - set(objs_display))
    elif action == ActionChoices.delete:
        before_value = list(
            set(changed_field).symmetric_difference(set(objs_display))
        )

    if changed_field:
        after = {field_name: changed_field}
    if before_value:
        before = {field_name: before_value}

    if sorted(str(before)) == sorted(str(after)):
        return

    create_or_update_operate_log(
        ActionChoices.update, resource_type,
        resource=instance, log_id=log_id, before=before, after=after
    )


def signal_of_operate_log_whether_continue(
        sender, instance, created, update_fields=None
):
    condition = True
    if not instance:
        condition = False
    if instance and getattr(instance, SKIP_SIGNAL, False):
        condition = False
    # Terminal create events are generated by the system, so they are not recorded
    if instance._meta.object_name == 'Terminal' and created:
        condition = False
    # last_login is the last-login timestamp and changes on every login, skip it
    if instance._meta.object_name == 'User' and \
            update_fields and 'last_login' in update_fields:
        condition = False
    # Not in the record whitelist, skip
    if sender._meta.object_name not in MODELS_NEED_RECORD:
        condition = False
    return condition


@receiver(pre_save)
def on_object_pre_create_or_update(
        sender, instance=None, raw=False, using=None, update_fields=None, **kwargs
):
    ok = signal_of_operate_log_whether_continue(
        sender, instance, False, update_fields
    )
    if not ok:
        return

    # The users.PrivateToken model has no id field, only pk
    instance_id = getattr(instance, 'id', getattr(instance, 'pk', None))
    instance_before_data = {'id': instance_id}
    raw_instance = type(instance).objects.filter(pk=instance_id).first()

    if raw_instance:
        instance_before_data = model_to_dict(raw_instance)
    operate_log_id = str(uuid.uuid4())
    instance_before_data['operate_log_id'] = operate_log_id
    setattr(instance, 'operate_log_id', operate_log_id)
    cache_instance_before_data(instance_before_data)


@receiver(post_save)
def on_object_created_or_update(
        sender, instance=None, created=False, update_fields=None, **kwargs
):
    ok = signal_of_operate_log_whether_continue(
        sender, instance, created, update_fields
    )
    if not ok:
        return

    log_id, before, after = None, None, None
    if created:
        action = ActionChoices.create
        after = model_to_dict(instance)
        log_id = getattr(instance, 'operate_log_id', None)
    else:
        action = ActionChoices.update
        current_instance = model_to_dict(instance)
        log_id, before, after = get_instance_current_with_cache_diff(current_instance)

    resource_type = sender._meta.verbose_name
    object_name = sender._meta.object_name
    create_or_update_operate_log(
        action, resource_type, resource=instance, log_id=log_id,
        before=before, after=after, object_name=object_name
    )


@receiver(pre_delete)
def on_object_delete(sender, instance=None, **kwargs):
    ok = signal_of_operate_log_whether_continue(sender, instance, False)
    if not ok:
        return

    resource_type = sender._meta.verbose_name
    create_or_update_operate_log(
        ActionChoices.delete, resource_type,
        resource=instance, before=model_to_dict(instance)
    )


@receiver(django_ready)
def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
    exclude_apps = {
        'django_cas_ng', 'captcha', 'admin', 'jms_oidc_rp', 'audits',
        'django_celery_beat', 'contenttypes', 'sessions', 'auth',
    }
    exclude_models = {
        'UserPasswordHistory', 'ContentType',
        'MessageContent', 'SiteMessage',
        'PlatformAutomation', 'PlatformProtocol', 'Protocol',
        'HistoricalAccount', 'GatheredUser', 'ApprovalRule',
        'BaseAutomation', 'CeleryTask', 'Command', 'JobAuditLog',
        'ConnectionToken', 'SessionJoinRecord',
        'HistoricalJob', 'Status', 'TicketStep', 'Ticket',
        'UserAssetGrantedTreeNodeRelation', 'TicketAssignee',
        'SuperTicket', 'SuperConnectionToken', 'PermNode',
        'PermedAsset', 'PermedAccount', 'MenuPermission',
        'Permission', 'TicketSession', 'ApplyLoginTicket',
        'ApplyCommandTicket', 'ApplyLoginAssetTicket',
    }
    for i, app in enumerate(apps.get_models(), 1):
        app_name = app._meta.app_label
        model_name = app._meta.object_name
        if app_name in exclude_apps or \
                model_name in exclude_models or \
                model_name.endswith('Execution'):
            continue
        MODELS_NEED_RECORD.add(model_name)
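A quick illustration of the before/after bookkeeping in on_m2m_changed above, using plain sets (the values here are illustrative): after a post_add the previous value is the current list minus the objects just added, and after a post_remove it is the symmetric difference with the objects just removed.

# Field reads ['dev', 'ops', 'audit'] after 'audit' was added:
current_after_add = ['dev', 'ops', 'audit']
added = ['audit']
print(set(current_after_add) - set(added))  # {'dev', 'ops'} -> value before the add

# Field reads ['dev', 'audit'] after 'ops' was removed:
current_after_remove = ['dev', 'audit']
removed = ['ops']
print(set(current_after_remove).symmetric_difference(removed))  # {'dev', 'audit', 'ops'} -> value before the remove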
@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
#
from django.dispatch import receiver
from django.db import transaction

from audits.models import (
    PasswordChangeLog, UserLoginLog, FTPLog, OperateLog
)
from audits.serializers import (
    UserLoginLogSerializer, FTPLogSerializer, OperateLogSerializer,
    PasswordChangeLogSerializer
)
from common.utils import get_request_ip, get_syslogger
from common.utils.encode import data_to_json
from jumpserver.utils import current_request
from users.models import User
from users.signals import post_user_change_password
from terminal.models import Session, Command
from terminal.serializers import SessionSerializer, SessionCommandSerializer


sys_logger = get_syslogger(__name__)


@receiver(post_user_change_password, sender=User)
def on_user_change_password(sender, user=None, **kwargs):
    if not current_request:
        remote_addr = '127.0.0.1'
        change_by = 'System'
    else:
        remote_addr = get_request_ip(current_request)
        if not current_request.user.is_authenticated:
            change_by = str(user)
        else:
            change_by = str(current_request.user)
    with transaction.atomic():
        PasswordChangeLog.objects.create(
            user=str(user), change_by=change_by,
            remote_addr=remote_addr,
        )


def on_audits_log_create(sender, instance=None, **kwargs):
    if sender == UserLoginLog:
        category = "login_log"
        serializer_cls = UserLoginLogSerializer
    elif sender == FTPLog:
        category = "ftp_log"
        serializer_cls = FTPLogSerializer
    elif sender == OperateLog:
        category = "operation_log"
        serializer_cls = OperateLogSerializer
    elif sender == PasswordChangeLog:
        category = "password_change_log"
        serializer_cls = PasswordChangeLogSerializer
    elif sender == Session:
        category = "host_session_log"
        serializer_cls = SessionSerializer
    elif sender == Command:
        category = "session_command_log"
        serializer_cls = SessionCommandSerializer
    else:
        return

    serializer = serializer_cls(instance)
    data = data_to_json(serializer.data, indent=None)
    msg = "{} - {}".format(category, data)
    sys_logger.info(msg)
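on_audits_log_create above is a plain function rather than a decorated receiver, so it has to be connected explicitly elsewhere in the app. A hedged sketch of such wiring, assuming a syslog on/off switch in settings; the helper name and the SYSLOG_ENABLE flag are illustrative assumptions, not taken from this commit:

from django.conf import settings
from django.db.models.signals import post_save


def connect_syslog_receivers():
    # Only ship audit records to syslog when the deployment opts in (assumed flag).
    if not getattr(settings, 'SYSLOG_ENABLE', False):
        return
    for model in (UserLoginLog, FTPLog, OperateLog, PasswordChangeLog, Session, Command):
        post_save.connect(on_audits_log_create, sender=model)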
@@ -7,7 +7,7 @@ from celery import shared_task
from ops.celery.decorator import (
    register_as_period_task
)
-from .models import UserLoginLog, OperateLog, FTPLog
+from .models import UserLoginLog, OperateLog, FTPLog, ActivityLog
from common.utils import get_log_keep_day
from django.utils.translation import gettext_lazy as _


@@ -26,6 +26,13 @@ def clean_operation_log_period():
    OperateLog.objects.filter(datetime__lt=expired_day).delete()


+def clean_activity_log_period():
+    now = timezone.now()
+    days = get_log_keep_day('ACTIVITY_LOG_KEEP_DAYS')
+    expired_day = now - datetime.timedelta(days=days)
+    ActivityLog.objects.filter(datetime__lt=expired_day).delete()
+
+
def clean_ftp_log_period():
    now = timezone.now()
    days = get_log_keep_day('FTP_LOG_KEEP_DAYS')
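clean_activity_log_period mirrors the other cleaners in this module, which are normally invoked from periodic Celery tasks. A hedged sketch of how it might be registered on its own; the task name, the crontab schedule, and the assumption that register_as_period_task accepts a crontab argument are all illustrative, not part of this commit:

@shared_task(verbose_name=_('Clean activity log'))
@register_as_period_task(crontab='0 2 * * *')  # assumed schedule: daily at 02:00
def clean_activity_log_period_task():
    clean_activity_log_period()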
@@ -20,6 +20,7 @@ class TicketStatusApi(mixins.AuthMixin, APIView):
        try:
            self.check_user_login_confirm()
            self.request.session['auth_third_party_done'] = 1
+            self.request.session.pop('auth_third_party_required', '')
            return Response({"msg": "ok"})
        except errors.LoginConfirmOtherError as e:
            reason = e.msg
@@ -86,10 +86,10 @@ class OAuth2EndSessionView(View):
        logger.debug(log_prompt.format('Log out the current user: {}'.format(request.user)))
        auth.logout(request)

-        if settings.AUTH_OAUTH2_LOGOUT_COMPLETELY:
+        logout_url = settings.AUTH_OAUTH2_PROVIDER_END_SESSION_ENDPOINT
+        if settings.AUTH_OAUTH2_LOGOUT_COMPLETELY and logout_url:
            logger.debug(log_prompt.format('Log out OAUTH2 platform user session synchronously'))
-            next_url = settings.AUTH_OAUTH2_PROVIDER_END_SESSION_ENDPOINT
-            return HttpResponseRedirect(next_url)
+            return HttpResponseRedirect(logout_url)

        logger.debug(log_prompt.format('Redirect'))
        return HttpResponseRedirect(logout_url)
@@ -62,6 +62,17 @@ class ThirdPartyLoginMiddleware(mixins.AuthMixin):
            return response
        if not request.session.get('auth_third_party_required'):
            return response
+        white_urls = [
+            'jsi18n/', '/static/',
+            'login/guard', 'login/wait-confirm',
+            'login-confirm-ticket/status',
+            'settings/public/open',
+            'core/auth/login', 'core/auth/logout'
+        ]
+        for url in white_urls:
+            if request.path.find(url) > -1:
+                return response
+
        ip = get_request_ip(request)
        try:
            self.request = request

@@ -89,7 +100,6 @@ class ThirdPartyLoginMiddleware(mixins.AuthMixin):
            guard_url = "%s?%s" % (guard_url, args)
            response = redirect(guard_url)
        finally:
-            request.session.pop('auth_third_party_required', '')
            return response
@@ -369,7 +369,7 @@ class AuthACLMixin:
    def check_user_login_confirm(self):
        ticket = self.get_ticket()
        if not ticket:
-            raise errors.LoginConfirmOtherError('', "Not found")
+            raise errors.LoginConfirmOtherError('', "Not found", '')
        elif ticket.is_state(ticket.State.approved):
            self.request.session["auth_confirm_required"] = ''
            return
@@ -1,8 +1,9 @@
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers

-from common.serializers.fields import EncryptedField
+from perms.serializers.permission import ActionChoicesField
from orgs.mixins.serializers import OrgResourceModelSerializerMixin
+from common.serializers.fields import EncryptedField
from ..models import ConnectionToken

__all__ = [

@@ -16,6 +17,7 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
        label=_("Input secret"), max_length=40960, required=False, allow_blank=True
    )
    from_ticket_info = serializers.SerializerMethodField(label=_("Ticket info"))
+    actions = ActionChoicesField(read_only=True, label=_("Actions"))

    class Meta:
        model = ConnectionToken

@@ -29,7 +31,7 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
        ]
        read_only_fields = [
            # Ordinary tokens do not support specifying a user
-            'user', 'expire_time',
+            'user', 'expire_time', 'is_expired',
            'user_display', 'asset_display',
        ]
        fields = fields_small + read_only_fields
@@ -19,7 +19,7 @@ from orgs.utils import current_org
from ops.const import JobStatus
from ops.models import Job, JobExecution
from common.utils import lazyproperty
-from audits.models import UserLoginLog, PasswordChangeLog, OperateLog
+from audits.models import UserLoginLog, PasswordChangeLog, OperateLog, FTPLog
from audits.const import LoginStatusChoices
from common.utils.timezone import local_now, local_zero_hour
from orgs.caches import OrgResourceStatisticsCache

@@ -38,13 +38,13 @@ class DateTimeMixin:
    def days(self):
        query_params = self.request.query_params
        count = query_params.get('days')
-        count = int(count) if count else 0
+        count = int(count) if count else 1
        return count

    @property
    def days_to_datetime(self):
        days = self.days
-        if days == 0:
+        if days == 1:
            t = local_zero_hour()
        else:
            t = local_now() - timezone.timedelta(days=days)

@@ -109,7 +109,7 @@ class DateTimeMixin:
    @lazyproperty
    def ftp_logs_queryset(self):
        t = self.days_to_datetime
-        queryset = OperateLog.objects.filter(datetime__gte=t)
+        queryset = FTPLog.objects.filter(date_start__gte=t)
        queryset = self.get_logs_queryset(queryset, 'user')
        return queryset

@@ -297,7 +297,7 @@ class DatesLoginMetricMixin:

    @lazyproperty
    def user_login_amount(self):
-        return self.login_logs_queryset.values('username').distinct().count()
+        return self.login_logs_queryset.values('username').count()

    @lazyproperty
    def operate_logs_amount(self):
@@ -512,6 +512,7 @@ class Config(dict):
        'LOGIN_LOG_KEEP_DAYS': 200,
        'TASK_LOG_KEEP_DAYS': 90,
        'OPERATE_LOG_KEEP_DAYS': 200,
+        'ACTIVITY_LOG_KEEP_DAYS': 200,
        'FTP_LOG_KEEP_DAYS': 200,
        'CLOUD_SYNC_TASK_EXECUTION_KEEP_DAYS': 30,

@@ -117,6 +117,7 @@ WS_LISTEN_PORT = CONFIG.WS_LISTEN_PORT
LOGIN_LOG_KEEP_DAYS = CONFIG.LOGIN_LOG_KEEP_DAYS
TASK_LOG_KEEP_DAYS = CONFIG.TASK_LOG_KEEP_DAYS
OPERATE_LOG_KEEP_DAYS = CONFIG.OPERATE_LOG_KEEP_DAYS
+ACTIVITY_LOG_KEEP_DAYS = CONFIG.ACTIVITY_LOG_KEEP_DAYS
FTP_LOG_KEEP_DAYS = CONFIG.FTP_LOG_KEEP_DAYS
ORG_CHANGE_TO_URL = CONFIG.ORG_CHANGE_TO_URL
WINDOWS_SKIP_ALL_MANUAL_PASSWORD = CONFIG.WINDOWS_SKIP_ALL_MANUAL_PASSWORD
@@ -139,7 +139,9 @@ class JMSInventory:
        return host

    def select_account(self, asset):
-        accounts = list(asset.accounts.all())
+        accounts = list(asset.accounts.filter(is_active=True))
+        if not accounts:
+            return None
        account_selected = None
        account_usernames = self.account_prefer
@@ -13,10 +13,12 @@ __all__ = [
class AdHocViewSet(OrgBulkModelViewSet):
    serializer_class = AdHocSerializer
    permission_classes = ()
+    search_fields = ('name', 'comment')
    model = AdHoc

    def allow_bulk_destroy(self, qs, filtered):
        return True

    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(creator=self.request.user)
@@ -106,6 +106,8 @@ class CeleryTaskViewSet(
    mixins.ListModelMixin, mixins.DestroyModelMixin,
    viewsets.GenericViewSet
):
+    filterset_fields = ('id', 'name')
+    search_fields = filterset_fields
    serializer_class = CeleryTaskSerializer

    def get_queryset(self):

@@ -116,6 +118,7 @@ class CeleryTaskExecutionViewSet(CommonApiMixin, viewsets.ModelViewSet):
    serializer_class = CeleryTaskExecutionSerializer
    http_method_names = ('get', 'post', 'head', 'options',)
    queryset = CeleryTaskExecution.objects.all()
+    search_fields = ('name',)

    def get_queryset(self):
        task_id = self.request.query_params.get('task_id')
@@ -1,8 +1,10 @@
from django.db.models import Count
+from django.db.transaction import atomic
from rest_framework.views import APIView
from django.shortcuts import get_object_or_404
from rest_framework.response import Response

+from ops.const import Types
from ops.models import Job, JobExecution
from ops.serializers.job import JobSerializer, JobExecutionSerializer

@@ -12,7 +14,7 @@ __all__ = ['JobViewSet', 'JobExecutionViewSet', 'JobRunVariableHelpAPIView',
from ops.tasks import run_ops_job_execution
from ops.variables import JMS_JOB_VARIABLE_HELP
from orgs.mixins.api import OrgBulkModelViewSet
-from orgs.utils import tmp_to_org, get_current_org_id, get_current_org
+from orgs.utils import tmp_to_org, get_current_org
from accounts.models import Account

@@ -25,6 +27,7 @@ def set_task_to_serializer_data(serializer, task):
class JobViewSet(OrgBulkModelViewSet):
    serializer_class = JobSerializer
    permission_classes = ()
+    search_fields = ('name', 'comment')
    model = Job

    def allow_bulk_destroy(self, qs, filtered):

@@ -62,10 +65,14 @@ class JobExecutionViewSet(OrgBulkModelViewSet):
    http_method_names = ('get', 'post', 'head', 'options',)
    permission_classes = ()
    model = JobExecution
+    search_fields = ('material',)

+    @atomic
    def perform_create(self, serializer):
        instance = serializer.save()
        instance.job_version = instance.job.version
+        instance.material = instance.job.material
+        instance.type = Types[instance.job.type].value
        instance.creator = self.request.user
        instance.save()
        task = run_ops_job_execution.delay(instance.id)

@@ -123,6 +130,7 @@ class FrequentUsernames(APIView):
    permission_classes = ()

    def get(self, request, **kwargs):
-        top_accounts = Account.objects.exclude(username='root').exclude(username__startswith='jms_').values('username').annotate(
+        top_accounts = Account.objects.exclude(username='root').exclude(username__startswith='jms_').values(
+            'username').annotate(
            total=Count('username')).order_by('total')[:5]
        return Response(data=top_accounts)
@@ -26,9 +26,7 @@ class PlaybookViewSet(OrgBulkModelViewSet):
    serializer_class = PlaybookSerializer
    permission_classes = ()
    model = Playbook
+    search_fields = ('name', 'comment')
-    def allow_bulk_destroy(self, qs, filtered):
-        return True

    def get_queryset(self):
        queryset = super().get_queryset()

@@ -37,28 +35,27 @@ class PlaybookViewSet(OrgBulkModelViewSet):

    def perform_create(self, serializer):
        instance = serializer.save()
-        if instance.create_method == 'blank':
-            dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
-            os.makedirs(dest_path)
-            with open(os.path.join(dest_path, 'main.yml'), 'w') as f:
-                f.write('## write your playbook here')
-
-        if instance.create_method == 'upload':
+        if 'multipart/form-data' in self.request.headers['Content-Type']:
            src_path = os.path.join(settings.MEDIA_ROOT, instance.path.name)
            dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
            unzip_playbook(src_path, dest_path)
-            valid_entry = ('main.yml', 'main.yaml', 'main')
-            for f in os.listdir(dest_path):
-                if f in valid_entry:
-                    return
-            os.remove(dest_path)
-            raise PlaybookNoValidEntry
+            if 'main.yml' not in os.listdir(dest_path):
+                raise PlaybookNoValidEntry
+
+        else:
+            if instance.create_method == 'blank':
+                dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
+                os.makedirs(dest_path)
+                with open(os.path.join(dest_path, 'main.yml'), 'w') as f:
+                    f.write('## write your playbook here')


class PlaybookFileBrowserAPIView(APIView):
    rbac_perms = ()
    permission_classes = ()

+    protected_files = ['root', 'main.yml']

    def get(self, request, **kwargs):
        playbook_id = kwargs.get('pk')
        playbook = get_object_or_404(Playbook, id=playbook_id)

@@ -132,6 +129,10 @@ class PlaybookFileBrowserAPIView(APIView):
        work_path = playbook.work_dir

        file_key = request.data.get('key', '')
+
+        if file_key in self.protected_files:
+            return Response({'msg': '{} can not be modified'.format(file_key)}, status=400)
+
        if os.path.dirname(file_key) == 'root':
            file_key = os.path.basename(file_key)

@@ -145,6 +146,8 @@ class PlaybookFileBrowserAPIView(APIView):

        if new_name:
            new_file_path = os.path.join(os.path.dirname(file_path), new_name)
+            if os.path.exists(new_file_path):
+                return Response({'msg': '{} already exists'.format(new_name)}, status=400)
            os.rename(file_path, new_file_path)
            file_path = new_file_path

@@ -154,15 +157,14 @@ class PlaybookFileBrowserAPIView(APIView):
        return Response({'msg': 'ok'})

    def delete(self, request, **kwargs):
-        not_delete_allowed = ['root', 'main.yml']
        playbook_id = kwargs.get('pk')
        playbook = get_object_or_404(Playbook, id=playbook_id)
        work_path = playbook.work_dir
        file_key = request.query_params.get('key', '')
        if not file_key:
-            return Response(status=400)
+            return Response({'msg': 'key is required'}, status=400)
-        if file_key in not_delete_allowed:
-            return Response(status=400)
+        if file_key in self.protected_files:
+            return Response({'msg': ' {} can not be delete'.format(file_key)}, status=400)
        file_path = os.path.join(work_path, file_key)
        if os.path.isdir(file_path):
            shutil.rmtree(file_path)
@@ -92,7 +92,7 @@ class Job(JMSOrgBaseModel, PeriodTaskModelMixin):
        return "{}:{}:{}".format(self.org.name, self.creator.name, self.playbook.name)

    def create_execution(self):
-        return self.executions.create(job_version=self.version, material=self.material, job_type=Types[self.type].label)
+        return self.executions.create(job_version=self.version, material=self.material, job_type=Types[self.type].value)

    class Meta:
        verbose_name = _("Job")

@@ -235,6 +235,8 @@ class JobExecution(JMSOrgBaseModel):

    @property
    def time_cost(self):
+        if not self.date_start:
+            return 0
        if self.is_finished:
            return (self.date_finished - self.date_start).total_seconds()
        return (timezone.now() - self.date_start).total_seconds()

@@ -59,7 +59,7 @@ class JobExecutionSerializer(BulkOrgResourceModelSerializer):
        model = JobExecution
        read_only_fields = ["id", "task_id", "timedelta", "time_cost",
                            'is_finished', 'date_start', 'date_finished',
-                            'date_created', 'is_success', 'task_id', 'job_type',
+                            'date_created', 'is_success', 'job_type',
                            'summary', 'material']
        fields = read_only_fields + [
            "job", "parameters", "creator"
@ -12,10 +12,10 @@ app_name = "ops"
|
||||||
router = DefaultRouter()
|
router = DefaultRouter()
|
||||||
bulk_router = BulkRouter()
|
bulk_router = BulkRouter()
|
||||||
|
|
||||||
router.register(r'adhocs', api.AdHocViewSet, 'adhoc')
|
bulk_router.register(r'adhocs', api.AdHocViewSet, 'adhoc')
|
||||||
router.register(r'playbooks', api.PlaybookViewSet, 'playbook')
|
bulk_router.register(r'playbooks', api.PlaybookViewSet, 'playbook')
|
||||||
router.register(r'jobs', api.JobViewSet, 'job')
|
bulk_router.register(r'jobs', api.JobViewSet, 'job')
|
||||||
router.register(r'job-executions', api.JobExecutionViewSet, 'job-execution')
|
bulk_router.register(r'job-executions', api.JobExecutionViewSet, 'job-execution')
|
||||||
|
|
||||||
router.register(r'celery/period-tasks', api.CeleryPeriodTaskViewSet, 'celery-period-task')
|
router.register(r'celery/period-tasks', api.CeleryPeriodTaskViewSet, 'celery-period-task')
|
||||||
|
|
||||||
|
|
|
@@ -30,32 +30,36 @@ def refresh_cache(name, org):
        logger.warning('refresh cache fail: {}'.format(name))


-def refresh_user_amount_cache(user):
+def refresh_all_orgs_user_amount_cache(user):
    orgs = user.orgs.distinct()
    for org in orgs:
        refresh_cache('users_amount', org)
+        refresh_cache('new_users_amount_this_week', org)


@receiver(post_save, sender=OrgRoleBinding)
def on_user_create_or_invite_refresh_cache(sender, instance, created, **kwargs):
    if created:
        refresh_cache('users_amount', instance.org)
+        refresh_cache('new_users_amount_this_week', instance.org)


@receiver(post_save, sender=SystemRoleBinding)
def on_user_global_create_refresh_cache(sender, instance, created, **kwargs):
    if created and current_org.is_root():
        refresh_cache('users_amount', current_org)
+        refresh_cache('new_users_amount_this_week', current_org)


@receiver(pre_user_leave_org)
def on_user_remove_refresh_cache(sender, org=None, **kwargs):
    refresh_cache('users_amount', org)
+    refresh_cache('new_users_amount_this_week', org)


@receiver(pre_delete, sender=User)
def on_user_delete_refresh_cache(sender, instance, **kwargs):
-    refresh_user_amount_cache(instance)
+    refresh_all_orgs_user_amount_cache(instance)


model_cache_field_mapper = {
@@ -78,6 +78,7 @@ exclude_permissions = (
    ('orgs', 'organizationmember', '*', '*'),
    ('settings', 'setting', 'add,change,delete', 'setting'),
    ('audits', 'operatelog', 'add,delete,change', 'operatelog'),
+    ('audits', 'activitylog', 'add,delete,change', 'activitylog'),
    ('audits', 'passwordchangelog', 'add,change,delete', 'passwordchangelog'),
    ('audits', 'userloginlog', 'add,change,delete,change', 'userloginlog'),
    ('audits', 'ftplog', 'change,delete', 'ftplog'),
@@ -3,6 +3,7 @@ from django.db import models
from django.db.models import Q
from django.conf import settings
from django.core.exceptions import ValidationError
+from django.db.models.signals import post_save
from rest_framework.serializers import ValidationError

from common.db.models import JMSBaseModel, CASCADE_SIGNAL_SKIP

@@ -15,6 +16,13 @@ __all__ = ['RoleBinding', 'SystemRoleBinding', 'OrgRoleBinding']


class RoleBindingManager(models.Manager):
+
+    def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
+        objs = super().bulk_create(objs, batch_size=batch_size, ignore_conflicts=ignore_conflicts)
+        for i in objs:
+            post_save.send(i.__class__, instance=i, created=True)
+        return objs
+
    def get_queryset(self):
        queryset = super(RoleBindingManager, self).get_queryset()
        q = Q(scope=Scope.system, org__isnull=True)
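Django's bulk_create skips model signals, so receivers such as the cache-expiry handlers added in the next file would never fire for bulk-created role bindings; the manager override above replays post_save by hand. A minimal standalone sketch of the same idea with generic names (not the project's models):

from django.db import models
from django.db.models.signals import post_save


class NotifyingManager(models.Manager):
    def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
        objs = super().bulk_create(objs, batch_size=batch_size, ignore_conflicts=ignore_conflicts)
        # bulk_create bypasses post_save, so replay it for each newly inserted row.
        for obj in objs:
            post_save.send(obj.__class__, instance=obj, created=True)
        return objs

One trade-off of this approach: the replayed signals always report created=True and only fire after the whole batch has been inserted.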
@@ -1,8 +1,8 @@
from django.dispatch import receiver
-from django.db.models.signals import post_migrate, post_save
+from django.db.models.signals import post_migrate, post_save, m2m_changed, post_delete
from django.apps import apps

-from .models import SystemRole, OrgRole
+from .models import SystemRole, OrgRole, OrgRoleBinding, SystemRoleBinding
from .builtin import BuiltinRole


@@ -21,7 +21,32 @@ def on_system_role_update(sender, instance, created, **kwargs):
    User.expire_users_rbac_perms_cache()


+@receiver(m2m_changed, sender=SystemRole.permissions.through)
+def on_system_role_permission_changed(sender, instance, action, **kwargs):
+    from users.models import User
+    User.expire_users_rbac_perms_cache()
+
+
+@receiver([post_save, post_delete], sender=SystemRoleBinding)
+def on_system_role_binding_update(sender, instance, created, **kwargs):
+    from users.models import User
+    User.expire_users_rbac_perms_cache()
+
+
@receiver(post_save, sender=OrgRole)
def on_org_role_update(sender, instance, created, **kwargs):
    from users.models import User
    User.expire_users_rbac_perms_cache()


+@receiver(m2m_changed, sender=OrgRole.permissions.through)
+def on_org_role_permission_changed(sender, instance, action, **kwargs):
+    from users.models import User
+    User.expire_users_rbac_perms_cache()
+
+
+@receiver([post_save, post_delete], sender=OrgRoleBinding)
+def on_org_role_binding_update(sender, instance, **kwargs):
+    print('>>>>>>>>>>>')
+    from users.models import User
+    User.expire_users_rbac_perms_cache()
@@ -49,7 +49,7 @@ class OAuth2SettingSerializer(serializers.Serializer):
        required=True, max_length=1024, label=_('Provider userinfo endpoint')
    )
    AUTH_OAUTH2_PROVIDER_END_SESSION_ENDPOINT = serializers.CharField(
-        required=False, max_length=1024, label=_('Provider end session endpoint')
+        required=False, allow_blank=True, max_length=1024, label=_('Provider end session endpoint')
    )
    AUTH_OAUTH2_LOGOUT_COMPLETELY = serializers.BooleanField(required=False, label=_('Logout completely'))
    AUTH_OAUTH2_USER_ATTR_MAP = serializers.DictField(

@@ -31,4 +31,7 @@ class CleaningSerializer(serializers.Serializer):
        min_value=1, max_value=99999, required=True, label=_('Session keep duration'),
        help_text=_('Unit: days, Session, record, command will be delete if more than duration, only in database')
    )
+    ACTIVITY_LOG_KEEP_DAYS = serializers.IntegerField(
+        min_value=1, max_value=9999,
+        label=_("Activity log keep days"), help_text=_("Unit: day")
+    )
@@ -24,6 +24,7 @@ class SmartEndpointViewMixin:
    target_protocol: None

    @action(methods=['get'], detail=False, permission_classes=[IsValidUserOrConnectionToken])
+    @tmp_to_root_org()
    def smart(self, request, *args, **kwargs):
        self.target_instance = self.get_target_instance()
        self.target_protocol = self.get_target_protocol()
@@ -93,14 +93,21 @@ class WebAPP(object):
        self.asset = asset
        self.account = account
        self.platform = platform
-        self.extra_data = self.asset.spec_info
        self._steps = list()
-        autofill_type = self.asset.spec_info.autofill
+        extra_data = self.asset.spec_info
+        autofill_type = extra_data.autofill
+        if not autofill_type:
+            protocol_setting = self.platform.get_protocol_setting("http")
+            if not protocol_setting:
+                print("No protocol setting found")
+                return
+            extra_data = protocol_setting
+            autofill_type = extra_data.autofill
        if autofill_type == "basic":
-            self._steps = self._default_custom_steps()
+            self._steps = self._default_custom_steps(extra_data)
        elif autofill_type == "script":
-            script_list = self.asset.spec_info.script
+            script_list = extra_data.script
            steps = sorted(script_list, key=lambda step_item: step_item.step)
            for item in steps:
                val = item.value

@@ -110,9 +117,8 @@ class WebAPP(object):
                item.value = val
                self._steps.append(item)

-    def _default_custom_steps(self) -> list:
+    def _default_custom_steps(self, spec_info) -> list:
        account = self.account
-        spec_info = self.asset.spec_info
        default_steps = [
            Step({
                "step": 1,
@@ -77,15 +77,18 @@ def wait_pid(pid):
             break


-class DictObj:
-    def __init__(self, in_dict: dict):
-        assert isinstance(in_dict, dict)
-        for key, val in in_dict.items():
+class DictObj(dict):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        for key, val in self.items():
             if isinstance(val, (list, tuple)):
                 setattr(self, key, [DictObj(x) if isinstance(x, dict) else x for x in val])
             else:
                 setattr(self, key, DictObj(val) if isinstance(val, dict) else val)

+    def __getattr__(self, item):
+        return self.get(item, None)
+

 class User(DictObj):
     id: str
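
With DictObj now subclassing dict, the wrapped manifest data supports both mapping and attribute access, nested dicts and lists are wrapped recursively, and __getattr__ returns None for missing keys instead of raising. A self-contained copy of the class from the diff with a small usage example (the sample data is made up):

# Same DictObj as in the diff, shown standalone with a usage example.
class DictObj(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for key, val in self.items():
            if isinstance(val, (list, tuple)):
                setattr(self, key, [DictObj(x) if isinstance(x, dict) else x for x in val])
            else:
                setattr(self, key, DictObj(val) if isinstance(val, dict) else val)

    def __getattr__(self, item):
        # missing attributes resolve to None instead of raising AttributeError
        return self.get(item, None)


manifest = DictObj({'name': 'navicat', 'platform': {'type': 'database'}})
print(manifest.name)            # 'navicat'  (attribute access)
print(manifest['name'])         # 'navicat'  (dict access still works)
print(manifest.platform.type)   # 'database' (nested dicts are wrapped)
print(manifest.comment)         # None       (unknown key, no exception)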
@@ -151,11 +154,32 @@ class Account(DictObj):
     secret_type: LabelValue


+class ProtocolSetting(DictObj):
+    autofill: str
+    username_selector: str
+    password_selector: str
+    submit_selector: str
+    script: list[Step]
+
+
+class PlatformProtocolSetting(DictObj):
+    name: str
+    port: int
+    setting: ProtocolSetting
+
+
 class Platform(DictObj):
     id: str
     name: str
     charset: LabelValue
     type: LabelValue
+    protocols: list[PlatformProtocolSetting]
+
+    def get_protocol_setting(self, protocol):
+        for item in self.protocols:
+            if item.name == protocol:
+                return item.setting
+        return None


 class Manifest(DictObj):
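
The new ProtocolSetting and PlatformProtocolSetting declarations are only type hints layered on DictObj; the actual lookup walks self.protocols and returns the setting of the first protocol whose name matches, or None. A small sketch of how callers can use it; the Platform class here is a stand-in and the sample data is invented, only the lookup logic mirrors the diff.

from types import SimpleNamespace as NS


# Minimal stand-in for the DictObj-based Platform above; only the lookup matters.
class Platform:
    def __init__(self, protocols):
        self.protocols = protocols

    def get_protocol_setting(self, protocol):
        # first protocol whose name matches wins; None when nothing matches
        for item in self.protocols:
            if item.name == protocol:
                return item.setting
        return None


platform = Platform(protocols=[
    NS(name='http', port=80, setting=NS(autofill='basic', submit_selector='id=login_button')),
])
print(platform.get_protocol_setting('http').autofill)  # 'basic'
print(platform.get_protocol_setting('ssh'))            # None -> caller must handle the miss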
@@ -1,3 +1,4 @@
+import os
 import sys
 import time

@@ -6,9 +7,11 @@ if sys.platform == 'win32':
     import win32api

     from pywinauto import Application
-    from pywinauto.controls.uia_controls import (
-        EditWrapper, ComboBoxWrapper, ButtonWrapper
-    )
+    from pywinauto.controls.uia_controls import ButtonWrapper
+    from pywinauto.keyboard import send_keys
+
+    import const as c

 from common import wait_pid, BaseApplication

 _default_path = r'C:\Program Files\PremiumSoft\Navicat Premium 16\navicat.exe'
@@ -29,17 +32,16 @@ class AppletApplication(BaseApplication):
         self.app = None

     def clean_up(self):
-        protocol_mapping = {
-            'mariadb': 'NavicatMARIADB', 'mongodb': 'NavicatMONGODB',
-            'mysql': 'Navicat', 'oracle': 'NavicatORA',
-            'sqlserver': 'NavicatMSSQL', 'postgresql': 'NavicatPG'
-        }
-        protocol_display = protocol_mapping.get(self.protocol, 'mysql')
-        sub_key = r'Software\PremiumSoft\%s\Servers' % protocol_display
-        try:
-            win32api.RegDeleteTree(winreg.HKEY_CURRENT_USER, sub_key)
-        except Exception as err:
-            print('Error: %s' % err)
+        protocols = (
+            'NavicatMARIADB', 'NavicatMONGODB', 'Navicat',
+            'NavicatORA', 'NavicatMSSQL', 'NavicatPG'
+        )
+        for p in protocols:
+            sub_key = r'Software\PremiumSoft\%s\Servers' % p
+            try:
+                win32api.RegDeleteTree(winreg.HKEY_CURRENT_USER, sub_key)
+            except Exception:
+                pass

     @staticmethod
     def launch():
|
||||||
winreg.SetValueEx(key, 'AlreadyShowNavicatV16WelcomeScreen', 0, winreg.REG_DWORD, 1)
|
winreg.SetValueEx(key, 'AlreadyShowNavicatV16WelcomeScreen', 0, winreg.REG_DWORD, 1)
|
||||||
# 禁止开启自动检查更新
|
# 禁止开启自动检查更新
|
||||||
winreg.SetValueEx(key, 'AutoCheckUpdate', 0, winreg.REG_DWORD, 0)
|
winreg.SetValueEx(key, 'AutoCheckUpdate', 0, winreg.REG_DWORD, 0)
|
||||||
|
# 禁止弹出初始化界面
|
||||||
winreg.SetValueEx(key, 'ShareUsageData', 0, winreg.REG_DWORD, 0)
|
winreg.SetValueEx(key, 'ShareUsageData', 0, winreg.REG_DWORD, 0)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
print('Launch error: %s' % err)
|
print('Launch error: %s' % err)
|
||||||
|
|
||||||
def _fill_to_mysql(self, app, menu, protocol_display='MySQL'):
|
@staticmethod
|
||||||
menu.item_by_path('File->New Connection->%s' % protocol_display).click_input()
|
def _exec_commands(commands):
|
||||||
conn_window = app.window(best_match='Dialog').child_window(title_re='New Connection')
|
for command in commands:
|
||||||
|
if command['type'] == 'key':
|
||||||
|
time.sleep(0.5)
|
||||||
|
send_keys(' '.join(command['commands']))
|
||||||
|
elif command['type'] == 'action':
|
||||||
|
for f in command['commands']:
|
||||||
|
f()
|
||||||
|
|
||||||
name_ele = conn_window.child_window(best_match='Edit5')
|
def _action_not_remember_password(self):
|
||||||
EditWrapper(name_ele.element_info).set_edit_text(self.name)
|
conn_window = self.app.window(best_match='Dialog'). \
|
||||||
|
child_window(title_re='New Connection')
|
||||||
|
remember_checkbox = conn_window.child_window(best_match='Save password')
|
||||||
|
remember_checkbox.click()
|
||||||
|
|
||||||
host_ele = conn_window.child_window(best_match='Edit4')
|
def _get_mysql_commands(self):
|
||||||
EditWrapper(host_ele.element_info).set_edit_text(self.host)
|
commands = [
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
'%f', c.DOWN, c.RIGHT, c.ENTER
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
self.name, c.TAB, self.host, c.TAB,
|
||||||
|
str(self.port), c.TAB, self.username,
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'action',
|
||||||
|
'commands': [
|
||||||
|
self._action_not_remember_password
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [c.ENTER]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
return commands
|
||||||
|
|
||||||
port_ele = conn_window.child_window(best_match='Edit2')
|
def _get_mariadb_commands(self):
|
||||||
EditWrapper(port_ele.element_info).set_edit_text(self.port)
|
commands = [
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
'%f', c.DOWN, c.RIGHT, c.DOWN * 5, c.ENTER,
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
self.name, c.TAB, self.host, c.TAB,
|
||||||
|
str(self.port), c.TAB, self.username
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'action',
|
||||||
|
'commands': [
|
||||||
|
self._action_not_remember_password
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [c.ENTER]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
return commands
|
||||||
|
|
||||||
username_ele = conn_window.child_window(best_match='Edit1')
|
def _get_mongodb_commands(self):
|
||||||
EditWrapper(username_ele.element_info).set_edit_text(self.username)
|
commands = [
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
'%f', c.DOWN, c.RIGHT, c.DOWN * 6, c.ENTER,
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
self.name, c.TAB * 3, self.host, c.TAB, str(self.port),
|
||||||
|
c.TAB, c.DOWN, c.TAB, self.db, c.TAB, self.username,
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'action',
|
||||||
|
'commands': [
|
||||||
|
self._action_not_remember_password
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [c.ENTER]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
return commands
|
||||||
|
|
||||||
password_ele = conn_window.child_window(best_match='Edit3')
|
def _get_postgresql_commands(self):
|
||||||
EditWrapper(password_ele.element_info).set_edit_text(self.password)
|
commands = [
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
'%f', c.DOWN, c.RIGHT, c.DOWN, c.ENTER,
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
self.name, c.TAB, self.host, c.TAB, str(self.port),
|
||||||
|
c.TAB, self.db, c.TAB, self.username
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'action',
|
||||||
|
'commands': [
|
||||||
|
self._action_not_remember_password
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [c.ENTER]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
return commands
|
||||||
|
|
||||||
def _fill_to_mariadb(self, app, menu):
|
def _get_sqlserver_commands(self):
|
||||||
self._fill_to_mysql(app, menu, 'MariaDB')
|
commands = [
|
||||||
|
{
|
||||||
def _fill_to_mongodb(self, app, menu):
|
'type': 'key',
|
||||||
menu.item_by_path('File->New Connection->MongoDB').click_input()
|
'commands': [
|
||||||
conn_window = app.window(best_match='Dialog').child_window(title_re='New Connection')
|
'%f', c.DOWN, c.RIGHT, c.DOWN * 4, c.ENTER,
|
||||||
|
],
|
||||||
auth_type_ele = conn_window.child_window(best_match='ComboBox2')
|
},
|
||||||
ComboBoxWrapper(auth_type_ele.element_info).select('Password')
|
{
|
||||||
|
'type': 'key',
|
||||||
name_ele = conn_window.child_window(best_match='Edit5')
|
'commands': [
|
||||||
EditWrapper(name_ele.element_info).set_edit_text(self.name)
|
self.name, c.TAB, '%s,%s' % (self.host, self.port),
|
||||||
|
c.TAB * 2, self.db, c.TAB * 2, self.username
|
||||||
host_ele = conn_window.child_window(best_match='Edit4')
|
]
|
||||||
EditWrapper(host_ele.element_info).set_edit_text(self.host)
|
},
|
||||||
|
{
|
||||||
port_ele = conn_window.child_window(best_match='Edit2')
|
'type': 'action',
|
||||||
EditWrapper(port_ele.element_info).set_edit_text(self.port)
|
'commands': [
|
||||||
|
self._action_not_remember_password
|
||||||
db_ele = conn_window.child_window(best_match='Edit6')
|
]
|
||||||
EditWrapper(db_ele.element_info).set_edit_text(self.db)
|
},
|
||||||
|
{
|
||||||
username_ele = conn_window.child_window(best_match='Edit1')
|
'type': 'key',
|
||||||
EditWrapper(username_ele.element_info).set_edit_text(self.username)
|
'commands': [c.ENTER]
|
||||||
|
}
|
||||||
password_ele = conn_window.child_window(best_match='Edit3')
|
]
|
||||||
EditWrapper(password_ele.element_info).set_edit_text(self.password)
|
return commands
|
||||||
|
|
||||||
def _fill_to_postgresql(self, app, menu):
|
|
||||||
menu.item_by_path('File->New Connection->PostgreSQL').click_input()
|
|
||||||
conn_window = app.window(best_match='Dialog').child_window(title_re='New Connection')
|
|
||||||
|
|
||||||
name_ele = conn_window.child_window(best_match='Edit6')
|
|
||||||
EditWrapper(name_ele.element_info).set_edit_text(self.name)
|
|
||||||
|
|
||||||
host_ele = conn_window.child_window(best_match='Edit5')
|
|
||||||
EditWrapper(host_ele.element_info).set_edit_text(self.host)
|
|
||||||
|
|
||||||
port_ele = conn_window.child_window(best_match='Edit2')
|
|
||||||
EditWrapper(port_ele.element_info).set_edit_text(self.port)
|
|
||||||
|
|
||||||
db_ele = conn_window.child_window(best_match='Edit4')
|
|
||||||
EditWrapper(db_ele.element_info).set_edit_text(self.db)
|
|
||||||
|
|
||||||
username_ele = conn_window.child_window(best_match='Edit1')
|
|
||||||
EditWrapper(username_ele.element_info).set_edit_text(self.username)
|
|
||||||
|
|
||||||
password_ele = conn_window.child_window(best_match='Edit3')
|
|
||||||
EditWrapper(password_ele.element_info).set_edit_text(self.password)
|
|
||||||
|
|
||||||
def _fill_to_sqlserver(self, app, menu):
|
|
||||||
menu.item_by_path('File->New Connection->SQL Server').click_input()
|
|
||||||
conn_window = app.window(best_match='Dialog').child_window(title_re='New Connection')
|
|
||||||
|
|
||||||
name_ele = conn_window.child_window(best_match='Edit5')
|
|
||||||
EditWrapper(name_ele.element_info).set_edit_text(self.name)
|
|
||||||
|
|
||||||
host_ele = conn_window.child_window(best_match='Edit4')
|
|
||||||
EditWrapper(host_ele.element_info).set_edit_text('%s,%s' % (self.host, self.port))
|
|
||||||
|
|
||||||
db_ele = conn_window.child_window(best_match='Edit3')
|
|
||||||
EditWrapper(db_ele.element_info).set_edit_text(self.db)
|
|
||||||
|
|
||||||
username_ele = conn_window.child_window(best_match='Edit6')
|
|
||||||
EditWrapper(username_ele.element_info).set_edit_text(self.username)
|
|
||||||
|
|
||||||
password_ele = conn_window.child_window(best_match='Edit2')
|
|
||||||
EditWrapper(password_ele.element_info).set_edit_text(self.password)
|
|
||||||
|
|
||||||
def _fill_to_oracle(self, app, menu):
|
|
||||||
menu.item_by_path('File->New Connection->Oracle').click_input()
|
|
||||||
conn_window = app.window(best_match='Dialog').child_window(title_re='New Connection')
|
|
||||||
|
|
||||||
name_ele = conn_window.child_window(best_match='Edit6')
|
|
||||||
EditWrapper(name_ele.element_info).set_edit_text(self.name)
|
|
||||||
|
|
||||||
host_ele = conn_window.child_window(best_match='Edit5')
|
|
||||||
EditWrapper(host_ele.element_info).set_edit_text(self.host)
|
|
||||||
|
|
||||||
port_ele = conn_window.child_window(best_match='Edit3')
|
|
||||||
EditWrapper(port_ele.element_info).set_edit_text(self.port)
|
|
||||||
|
|
||||||
db_ele = conn_window.child_window(best_match='Edit2')
|
|
||||||
EditWrapper(db_ele.element_info).set_edit_text(self.db)
|
|
||||||
|
|
||||||
username_ele = conn_window.child_window(best_match='Edit')
|
|
||||||
EditWrapper(username_ele.element_info).set_edit_text(self.username)
|
|
||||||
|
|
||||||
password_ele = conn_window.child_window(best_match='Edit4')
|
|
||||||
EditWrapper(password_ele.element_info).set_edit_text(self.password)
|
|
||||||
|
|
||||||
|
def _get_oracle_commands(self):
|
||||||
|
commands = [
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
'%f', c.DOWN, c.RIGHT, c.DOWN * 2, c.ENTER,
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [
|
||||||
|
self.name, c.TAB * 2, self.host, c.TAB,
|
||||||
|
str(self.port), c.TAB, self.db, c.TAB, c.TAB, self.username,
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'action',
|
||||||
|
'commands': (self._action_not_remember_password,)
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'key',
|
||||||
|
'commands': [c.ENTER]
|
||||||
|
}
|
||||||
|
]
|
||||||
if self.privileged:
|
if self.privileged:
|
||||||
conn_window.child_window(best_match='Advanced', control_type='TabItem').click_input()
|
commands.insert(3, {
|
||||||
role_ele = conn_window.child_window(best_match='ComboBox2')
|
'type': 'key',
|
||||||
ComboBoxWrapper(role_ele.element_info).select('SYSDBA')
|
'commands': (c.TAB * 4, c.RIGHT, c.TAB * 3, c.DOWN)
|
||||||
|
})
|
||||||
|
return commands
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
self.launch()
|
self.launch()
|
||||||
app = Application(backend='uia')
|
self.app = Application(backend='uia')
|
||||||
app.start(self.path)
|
work_dir = os.path.dirname(self.path)
|
||||||
self.pid = app.process
|
self.app.start(self.path, work_dir=work_dir)
|
||||||
|
self.pid = self.app.process
|
||||||
|
|
||||||
# 检测是否为试用版本
|
# 检测是否为试用版本
|
||||||
try:
|
try:
|
||||||
trial_btn = app.top_window().child_window(
|
trial_btn = self.app.top_window().child_window(
|
||||||
best_match='Trial', control_type='Button'
|
best_match='Trial', control_type='Button'
|
||||||
)
|
)
|
||||||
ButtonWrapper(trial_btn.element_info).click()
|
ButtonWrapper(trial_btn.element_info).click()
|
||||||
|
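
Each _get_*_commands method now returns a declarative list of steps instead of driving individual dialog controls: 'key' steps are joined and sent through pywinauto's send_keys, while 'action' steps call plain Python callables (for example un-ticking "Save password"). A minimal sketch of the executor loop with a stub sender, so the structure can be followed without Navicat or pywinauto; send_keys is replaced by print here and the sample commands are invented, but the key codes match const.py:

import time


# Stand-in for pywinauto.keyboard.send_keys so the sketch runs anywhere.
def send_keys(sequence):
    print('send_keys(%r)' % sequence)


DOWN, TAB, ENTER = '{DOWN}', '{VK_TAB}', '{VK_RETURN}'


def exec_commands(commands):
    # same shape as AppletApplication._exec_commands in the diff
    for command in commands:
        if command['type'] == 'key':
            time.sleep(0.5)                           # give the UI time to catch up
            send_keys(' '.join(command['commands']))  # one keystroke string per step
        elif command['type'] == 'action':
            for f in command['commands']:
                f()


exec_commands([
    {'type': 'key', 'commands': ['%f', DOWN, ENTER]},              # Alt+F, pick a menu entry
    {'type': 'key', 'commands': ['demo', TAB, '127.0.0.1']},       # fill name and host fields
    {'type': 'action', 'commands': [lambda: print('uncheck "Save password"')]},
    {'type': 'key', 'commands': [ENTER]},                          # confirm the dialog
])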
@@ -185,26 +261,27 @@ class AppletApplication(BaseApplication):
         except Exception:
             pass

-        menubar = app.window(best_match='Navicat Premium', control_type='Window') \
-            .child_window(best_match='Menu', control_type='MenuBar')
-
-        file = menubar.child_window(best_match='File', control_type='MenuItem')
-        file.click_input()
-        menubar.item_by_path('File->New Connection').click_input()
-
-        # Pick the fill action for this protocol
-        action = getattr(self, '_fill_to_%s' % self.protocol, None)
+        # Get the operation commands for this protocol
+        action = getattr(self, '_get_%s_commands' % self.protocol, None)
+
         if action is None:
             raise ValueError('This protocol is not supported: %s' % self.protocol)
-        action(app, menubar)
-
-        conn_window = app.window(best_match='Dialog').child_window(title_re='New Connection')
-        ok_btn = conn_window.child_window(best_match='OK', control_type='Button')
-        ok_btn.click()
-
-        file.click_input()
-        menubar.item_by_path('File->Open Connection').click_input()
-        self.app = app
+        commands = action()
+        # Dismiss the desktop licensing dialog
+        commands.insert(0, {'type': 'key', 'commands': (c.ENTER,)})
+        # Log in
+        commands.extend([
+            {
+                'type': 'key',
+                'commands': (
+                    '%f', c.DOWN * 5, c.ENTER
+                )
+            },
+            {
+                'type': 'key',
+                'commands': (self.password, c.ENTER)
+            }
+        ])
+        self._exec_commands(commands)

     def wait(self):
         try:
@@ -0,0 +1,7 @@
+
+UP = '{UP}'
+LEFT = '{LEFT}'
+DOWN = '{DOWN}'
+RIGHT = '{RIGHT}'
+TAB = '{VK_TAB}'
+ENTER = '{VK_RETURN}'
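
The new const.py just names the pywinauto send_keys escape codes used by the command lists. Because they are plain strings, repetition such as c.DOWN * 5 expands to five key presses, and joining a step's commands with spaces produces the exact string handed to send_keys. For example:

DOWN = '{DOWN}'
TAB = '{VK_TAB}'

# String multiplication is how the command lists express repeated key presses.
print(DOWN * 5)                          # {DOWN}{DOWN}{DOWN}{DOWN}{DOWN}
print(' '.join(['%f', DOWN * 2, TAB]))   # the string a 'key' step sends in one go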
@@ -170,8 +170,8 @@ class ConnectMethodUtil:
                 'web_methods': [WebMethod.web_gui],
                 'listen': [Protocol.http],
                 'support': [
-                    Protocol.mysql, Protocol.postgresql, Protocol.oracle,
-                    Protocol.sqlserver, Protocol.mariadb
+                    Protocol.mysql, Protocol.postgresql,
+                    Protocol.oracle, Protocol.mariadb
                 ],
                 'match': 'm2m'
             },
@@ -74,7 +74,7 @@ class Endpoint(JMSBaseModel):
         from assets.models import Asset
         from terminal.models import Session
         if isinstance(instance, Session):
-            instance = instance.get_asset_or_application()
+            instance = instance.get_asset()
         if not isinstance(instance, Asset):
             return None
         values = instance.labels.filter(name='endpoint').values_list('value', flat=True)
@@ -178,14 +178,11 @@ class Session(OrgModelMixin):
     def login_from_display(self):
         return self.get_login_from_display()

-    def get_asset_or_application(self):
-        instance = get_object_or_none(Asset, pk=self.asset_id)
-        if not instance:
-            instance = get_object_or_none(Application, pk=self.asset_id)
-        return instance
+    def get_asset(self):
+        return get_object_or_none(Asset, pk=self.asset_id)

     def get_target_ip(self):
-        instance = self.get_asset_or_application()
+        instance = self.get_asset()
         target_ip = instance.get_target_ip() if instance else ''
         return target_ip