mirror of https://github.com/jumpserver/jumpserver
perf: 账号备份 (account backup)
parent a86d5c1456
commit 389094f615
@@ -0,0 +1,66 @@
+# Generated by Django 3.2.13 on 2022-09-16 07:56
+from functools import reduce
+from django.db import migrations, models
+from assets.const import AllTypes, HostTypes
+
+
+def migrate_backup_types(apps, schema_editor):
+    all_types = list(reduce(
+        lambda x, y: x + y,
+        [
+            [j['value'] for j in i['children']]
+            for i in AllTypes.grouped_choices_to_objs()
+        ]
+    ))
+    asset_types = [i[0] for i in HostTypes.choices]
+    app_types = list(set(all_types) - set(asset_types))
+
+    backup_model = apps.get_model("assets", "AccountBackupPlan")
+    backup_objs = []
+    for instance in backup_model.objects.all():
+        types = instance.types
+        if types == 1:
+            instance.categories = asset_types
+        elif types == 2:
+            instance.categories = app_types
+        elif types == 255:
+            instance.categories = all_types
+        else:
+            instance.categories = []
+        backup_objs.append(instance)
+    backup_model.objects.bulk_update(backup_objs, ['categories'])
+
+    backup_execution_model = apps.get_model("assets", "AccountBackupPlanExecution")
+    backup_execution_objs = []
+    for instance in backup_execution_model.objects.all():
+        types = instance.plan_snapshot.get('types', [])
+        if 'all' in types:
+            instance.plan_snapshot['categories'] = all_types
+        elif 'asset' in types:
+            instance.plan_snapshot['categories'] = asset_types
+        elif 'application' in types:
+            instance.plan_snapshot['categories'] = app_types
+        else:
+            instance.categories = []
+        instance.plan_snapshot.pop('types', None)
+        backup_execution_objs.append(instance)
+    backup_execution_model.objects.bulk_update(backup_execution_objs, ['plan_snapshot'])
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('assets', '0108_auto_20220915_1032'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='accountbackupplan',
+            name='categories',
+            field=models.JSONField(default=list),
+        ),
+        migrations.RunPython(migrate_backup_types),
+        migrations.RemoveField(
+            model_name='accountbackupplan',
+            name='types',
+        ),
+    ]
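The migration above flattens the grouped platform-type tree from AllTypes.grouped_choices_to_objs() into one flat list, takes the host subset from HostTypes.choices, and rewrites each plan's old integer types flag (1 for assets, 2 for applications, 255 for everything, per the branches above) into an explicit categories list; executions get the same treatment through their plan_snapshot. A minimal sketch of the flattening and the set arithmetic, on invented data (the real grouped structure may differ):

# Minimal sketch of the flattening in migrate_backup_types, on invented data.
from functools import reduce

# Stand-in for AllTypes.grouped_choices_to_objs(); the real structure may differ.
grouped = [
    {'value': 'host', 'children': [{'value': 'linux'}, {'value': 'windows'}]},
    {'value': 'database', 'children': [{'value': 'mysql'}, {'value': 'postgresql'}]},
]

# Concatenate every group's child values into one flat list, as the migration does.
all_types = list(reduce(
    lambda x, y: x + y,
    [[child['value'] for child in group['children']] for group in grouped]
))
print(all_types)  # ['linux', 'windows', 'mysql', 'postgresql']

# Assumed host-only subset; in the migration it comes from HostTypes.choices.
asset_types = ['linux', 'windows']
app_types = list(set(all_types) - set(asset_types))
print(sorted(app_types))  # ['mysql', 'postgresql']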
@@ -12,66 +12,17 @@ from orgs.mixins.models import OrgModelMixin
 from ops.mixin import PeriodTaskModelMixin
 from common.utils import get_logger
 from common.db.encoder import ModelJSONFieldEncoder
-from common.db.models import BitOperationChoice
 from common.mixins.models import CommonModelMixin
 from common.const.choices import Trigger
-from ..const import AllTypes, Category

-__all__ = ['AccountBackupPlan', 'AccountBackupPlanExecution', 'Type']
+__all__ = ['AccountBackupPlan', 'AccountBackupPlanExecution']

 logger = get_logger(__file__)


-def _choice_map(default=None):
-    offset = 0
-    temp_key = 0b1
-
-    if default is None:
-        _all = (0b1 << 32) - 1
-    else:
-        _all = default
-
-    choices = {
-        _all: ('all', 'All')
-    }
-
-    for info in AllTypes.grouped_choices_to_objs():
-        temp_keys = []
-        for c in info['children']:
-            key = temp_key << offset
-            temp_keys.append(key)
-            choices[key] = (c['value'], c['display_name'])
-            offset += 1
-        parent_key = reduce(lambda x, y: x | y, temp_keys)
-        choices[parent_key] = (info['value'], info['display_name'])
-    return choices
-
-
-class Type(BitOperationChoice):
-    NONE = 0
-
-    ALL = (0b1 << 32) - 1
-    TYPE_MAP = _choice_map(ALL)
-
-    DB_CHOICES = tuple((k, v[1]) for k, v in TYPE_MAP.items())
-
-    NAME_MAP = {k: v[0] for k, v in TYPE_MAP.items()}
-
-    NAME_MAP_REVERSE = {v: k for k, v in NAME_MAP.items()}
-    CHOICES = []
-    for i, j in DB_CHOICES:
-        CHOICES.append((NAME_MAP[i], j))
-
-    @classmethod
-    def get_types(cls, value: int) -> list:
-        exclude_types = ['all'] + Category.values
-        current_all = cls.value_to_choices(value)
-        return list(filter(lambda x: x not in exclude_types, current_all))
-
-
 class AccountBackupPlan(CommonModelMixin, PeriodTaskModelMixin, OrgModelMixin):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
-    types = models.BigIntegerField()
+    categories = models.JSONField(default=list)
     recipients = models.ManyToManyField(
         'users.User', related_name='recipient_escape_route_plans', blank=True,
         verbose_name=_("Recipient")
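The hunk above deletes the bitmask machinery: _choice_map assigned one bit per platform type and OR-ed child bits into a parent key, and Type(BitOperationChoice) capped the space at 32 bits (ALL = (0b1 << 32) - 1), so the types column stored an opaque integer. Storing categories as a JSONField keeps the names directly and drops the conversion helpers. The following sketch only illustrates the general bitmask round trip with invented names; it is not the actual BitOperationChoice API:

# Hypothetical bit-flag scheme, sketched to show what the removed Type class encoded.
NAME_TO_BIT = {'linux': 0b001, 'windows': 0b010, 'mysql': 0b100}  # invented assignments

def choices_to_value(names):
    # Pack a list of choice names into one integer.
    value = 0
    for name in names:
        value |= NAME_TO_BIT[name]
    return value

def value_to_choices(value):
    # Unpack a stored integer back into the list of choice names.
    return [name for name, bit in NAME_TO_BIT.items() if value & bit]

stored = choices_to_value(['linux', 'mysql'])
print(stored)                    # 5
print(value_to_choices(stored))  # ['linux', 'mysql']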
@@ -102,7 +53,7 @@ class AccountBackupPlan(CommonModelMixin, PeriodTaskModelMixin, OrgModelMixin):
             'crontab': self.crontab,
             'org_id': self.org_id,
             'created_by': self.created_by,
-            'types': Type.get_types(self.types),
+            'categories': self.categories,
             'recipients': {
                 str(recipient.id): (str(recipient), bool(recipient.secret_key))
                 for recipient in self.recipients.all()
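With that change, the plan snapshot recorded when a backup runs carries the category names as stored, instead of a list decoded from the bitmask via Type.get_types(). A hypothetical before/after payload (all values invented):

# Invented snapshot payloads, only to show the key rename.
old_snapshot = {'crontab': '0 0 * * 0', 'types': ['linux', 'windows']}       # decoded via Type.get_types()
new_snapshot = {'crontab': '0 0 * * 0', 'categories': ['linux', 'windows']}  # taken directly from the JSONField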
@@ -149,9 +100,9 @@ class AccountBackupPlanExecution(OrgModelMixin):
         verbose_name = _('Account backup execution')

     @property
-    def types(self):
-        types = self.plan_snapshot.get('types')
-        return types
+    def categories(self):
+        categories = self.plan_snapshot.get('categories')
+        return categories

     @property
     def recipients(self):
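The execution model reads that snapshot back through the renamed property. A small stub of the accessor pattern, with the surrounding Django model omitted:

# Stub illustrating the renamed property; the real class is a Django model.
class ExecutionSketch:
    def __init__(self, plan_snapshot):
        self.plan_snapshot = plan_snapshot

    @property
    def categories(self):
        # Mirrors the diff: pull the list straight out of the JSON snapshot.
        return self.plan_snapshot.get('categories')

print(ExecutionSketch({'categories': ['linux']}).categories)  # ['linux']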
@@ -7,8 +7,6 @@ from orgs.mixins.serializers import BulkOrgResourceModelSerializer
 from ops.mixin import PeriodTaskSerializerMixin
 from common.utils import get_logger

-from .base import TypesField
-
 from ..models import AccountBackupPlan, AccountBackupPlanExecution

 logger = get_logger(__file__)
@@ -17,14 +15,12 @@ __all__ = ['AccountBackupPlanSerializer', 'AccountBackupPlanExecutionSerializer'


 class AccountBackupPlanSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSerializer):
-    types = TypesField(required=False, allow_null=True, label=_("Actions"))
-
     class Meta:
         model = AccountBackupPlan
         fields = [
             'id', 'name', 'is_periodic', 'interval', 'crontab', 'date_created',
             'date_updated', 'created_by', 'periodic_display', 'comment',
-            'recipients', 'types'
+            'recipients', 'categories'
         ]
         extra_kwargs = {
             'name': {'required': True},
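Because categories now holds a plain list of names, the serializer above drops the custom types = TypesField(...) declaration that converted between the stored integer and a list of choices (the field class itself is removed further down). A standalone sketch, assuming djangorestframework is installed and using an invented serializer name, of how a plain list field is enough:

# Standalone sketch (not the project's serializer): a plain list field suffices
# once the model stores category names instead of a bitmask.
from rest_framework import serializers

class BackupPlanSketchSerializer(serializers.Serializer):
    name = serializers.CharField()
    categories = serializers.ListField(child=serializers.CharField(), default=list)

s = BackupPlanSketchSerializer(data={'name': 'weekly', 'categories': ['host', 'database']})
s.is_valid(raise_exception=True)
print(s.validated_data['categories'])  # ['host', 'database']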
@@ -7,7 +7,6 @@ from rest_framework import serializers

 from common.utils import ssh_pubkey_gen, ssh_private_key_gen, validate_ssh_private_key
 from common.drf.fields import EncryptedField
-from assets.models import Type
 from .utils import validate_password_for_ansible


@@ -71,24 +70,3 @@ class AuthValidateMixin(serializers.Serializer):
     def update(self, instance, validated_data):
         self.clean_auth_fields(validated_data)
         return super().update(instance, validated_data)
-
-
-class TypesField(serializers.MultipleChoiceField):
-    def __init__(self, **kwargs):
-        kwargs['choices'] = Type.CHOICES
-        super().__init__(**kwargs)
-
-    def to_representation(self, value):
-        return Type.value_to_choices(value)
-
-    def to_internal_value(self, data):
-        if data is None:
-            return data
-        return Type.choices_to_value(data)
-
-
-class ActionsDisplayField(TypesField):
-    def to_representation(self, value):
-        values = super().to_representation(value)
-        choices = dict(Type.CHOICES)
-        return [choices.get(i) for i in values]
@@ -7,7 +7,8 @@ from django.conf import settings
 from django.db.models import F
 from rest_framework import serializers

-from assets.models import Account, Type
+from assets.models import Account
+from assets.const import AllTypes
 from assets.serializers import AccountSecretSerializer
 from assets.notifications import AccountBackupExecutionTaskMsg
 from users.models import User
@@ -76,25 +77,29 @@ class AssetAccountHandler(BaseAccountHandler):
         return filename

     @classmethod
-    def create_data_map(cls, types: list):
+    def create_data_map(cls, categories: list):
         data_map = defaultdict(list)

-        # TODO 可以优化一下查询 在账号上做type的缓存 避免数据量大时连表操作
+        # TODO 可以优化一下查询 在账号上做 category 的缓存 避免数据量大时连表操作
         qs = Account.objects.filter(
-            asset__platform__type__in=types
-        ).annotate(type=F('asset__platform__type'))
+            asset__platform__category__in=categories
+        ).annotate(category=F('asset__platform__category'))
         if not qs.exists():
             return data_map

-        type_dict = dict(Type.CHOICES)
+        category_dict = {}
+        for i in AllTypes.grouped_choices_to_objs():
+            for j in i['children']:
+                category_dict[j['value']] = j['display_name']
+
         header_fields = cls.get_header_fields(AccountSecretSerializer(qs.first()))
-        account_type_map = defaultdict(list)
+        account_category_map = defaultdict(list)
         for account in qs:
-            account_type_map[account.type].append(account)
+            account_category_map[account.category].append(account)

         data_map = {}
-        for tp, accounts in account_type_map.items():
-            sheet_name = type_dict[tp]
+        for category, accounts in account_category_map.items():
+            sheet_name = category_dict.get(category, category)
             data = AccountSecretSerializer(accounts, many=True).data
             data_map.update(cls.add_rows(data, header_fields, sheet_name))

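In the export handler above, accounts are now filtered and grouped by platform category rather than by the bitmask type, and the sheet titles come from a value-to-display-name map built from AllTypes.grouped_choices_to_objs(); the Chinese TODO comment notes that the query could be optimized by caching the category on the account to avoid join queries on large data sets. A sketch of the grouping on invented rows (the real code works on a queryset annotated with asset__platform__category):

# Sketch of the new grouping on invented data; real rows are annotated Account objects.
from collections import defaultdict

grouped = [  # stand-in for AllTypes.grouped_choices_to_objs()
    {'value': 'host', 'display_name': 'Host',
     'children': [{'value': 'linux', 'display_name': 'Linux'}]},
    {'value': 'database', 'display_name': 'Database',
     'children': [{'value': 'mysql', 'display_name': 'MySQL'}]},
]

# value -> display name, later used as the sheet title
category_dict = {}
for group in grouped:
    for child in group['children']:
        category_dict[child['value']] = child['display_name']

accounts = [  # invented account rows with their annotated category value
    {'name': 'root', 'category': 'linux'},
    {'name': 'admin', 'category': 'mysql'},
]

account_category_map = defaultdict(list)
for account in accounts:
    account_category_map[account['category']].append(account)

for category, rows in account_category_map.items():
    sheet_name = category_dict.get(category, category)  # fall back to the raw value
    print(sheet_name, [r['name'] for r in rows])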
@@ -117,9 +122,9 @@ class AccountBackupHandler:
         # Print task start date
         time_start = time.time()
         files = []
-        types = self.execution.types
+        categories = self.execution.categories

-        data_map = AssetAccountHandler.create_data_map(types)
+        data_map = AssetAccountHandler.create_data_map(categories)
         if not data_map:
             return files
