perf: Optimize account backup

pull/8733/head
feng626 2022-09-01 16:31:20 +08:00 committed by Jiangjie.Bai
parent f0dc519423
commit ef4cc5f646
3 changed files with 139 additions and 59 deletions

View File

@@ -16,7 +16,7 @@ from .. import const
 __all__ = [
     'AppSerializer', 'MiniAppSerializer', 'AppSerializerMixin',
-    'AppAccountSerializer', 'AppAccountSecretSerializer'
+    'AppAccountSerializer', 'AppAccountSecretSerializer', 'AppAccountBackUpSerializer'
 ]
@@ -32,21 +32,23 @@ class AppSerializerMixin(serializers.Serializer):
         return instance

     def get_attrs_serializer(self):
-        default_serializer = serializers.Serializer(read_only=True)
         instance = self.app
-        if instance:
-            _type = instance.type
-            _category = instance.category
-        else:
-            _type = self.context['request'].query_params.get('type')
-            _category = self.context['request'].query_params.get('category')
+        tp = getattr(self, 'tp', None)
+        default_serializer = serializers.Serializer(read_only=True)
+        if not tp:
+            if instance:
+                tp = instance.type
+                category = instance.category
+            else:
+                tp = self.context['request'].query_params.get('type')
+                category = self.context['request'].query_params.get('category')

-        if _type:
-            if isinstance(self, AppAccountSecretSerializer):
-                serializer_class = type_secret_serializer_classes_mapping.get(_type)
+        if tp:
+            if isinstance(self, AppAccountBackUpSerializer):
+                serializer_class = type_secret_serializer_classes_mapping.get(tp)
             else:
-                serializer_class = type_serializer_classes_mapping.get(_type)
-        elif _category:
-            serializer_class = category_serializer_classes_mapping.get(_category)
+                serializer_class = type_serializer_classes_mapping.get(tp)
+        elif category:
+            serializer_class = category_serializer_classes_mapping.get(category)
         else:
             serializer_class = default_serializer
@@ -154,11 +156,6 @@ class AppAccountSerializer(AppSerializerMixin, AuthSerializerMixin, BulkOrgResou

 class AppAccountSecretSerializer(SecretReadableMixin, AppAccountSerializer):
     class Meta(AppAccountSerializer.Meta):
-        fields_backup = [
-            'id', 'app_display', 'attrs', 'username', 'password', 'private_key',
-            'public_key', 'date_created', 'date_updated', 'version'
-        ]
-
         extra_kwargs = {
             'password': {'write_only': False},
             'private_key': {'write_only': False},
@@ -166,3 +163,22 @@ class AppAccountSecretSerializer(SecretReadableMixin, AppAccountSerializer):
             'app_display': {'label': _('Application display')},
             'systemuser_display': {'label': _('System User')}
         }
+
+
+class AppAccountBackUpSerializer(AppAccountSecretSerializer):
+    class Meta(AppAccountSecretSerializer.Meta):
+        fields = [
+            'id', 'app_display', 'attrs', 'username', 'password', 'private_key',
+            'public_key', 'date_created', 'date_updated', 'version'
+        ]
+
+    def __init__(self, *args, **kwargs):
+        self.tp = kwargs.pop('tp', None)
+        super().__init__(*args, **kwargs)
+
+    @classmethod
+    def setup_eager_loading(cls, queryset):
+        return queryset
+
+    def to_representation(self, instance):
+        return super(AppAccountSerializer, self).to_representation(instance)
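
A quick sketch of how this backup serializer is meant to be driven (mirroring the handler changes further down): the tp keyword lets the attrs serializer be resolved without a bound Application, and the whole batch is serialized in one many=True pass. The 'mysql' type key and the accounts list below are placeholders for illustration, not taken from this diff.

    from applications.const import AppType
    from applications.serializers import AppAccountBackUpSerializer

    app_type = 'mysql'   # example AppType key, for illustration only
    accounts = []        # would be the Account instances collected for this type

    sheet_name = AppType.get_label(app_type)
    # No Application instance is bound here; the explicit tp drives get_attrs_serializer()
    header_serializer = AppAccountBackUpSerializer(tp=app_type)
    # One bulk serialization pass instead of serializing account by account
    rows = AppAccountBackUpSerializer(accounts, many=True, tp=app_type).data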

View File

@@ -76,10 +76,6 @@ class AccountSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):

 class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
     class Meta(AccountSerializer.Meta):
-        fields_backup = [
-            'hostname', 'ip', 'platform', 'protocols', 'username', 'password',
-            'private_key', 'public_key', 'date_created', 'date_updated', 'version'
-        ]
         extra_kwargs = {
             'password': {'write_only': False},
             'private_key': {'write_only': False},
@@ -88,6 +84,22 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
         }

+
+class AccountBackUpSerializer(AccountSecretSerializer):
+    class Meta(AccountSecretSerializer.Meta):
+        fields = [
+            'id', 'hostname', 'ip', 'username', 'password',
+            'private_key', 'public_key', 'date_created',
+            'date_updated', 'version'
+        ]
+
+    @classmethod
+    def setup_eager_loading(cls, queryset):
+        return queryset
+
+    def to_representation(self, instance):
+        return super(AccountSerializer, self).to_representation(instance)
+

 class AccountTaskSerializer(serializers.Serializer):
     ACTION_CHOICES = (
         ('test', 'test'),
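
Since fields_backup is dropped, the backup handler's get_header_fields (changed below) now reads Meta.fields, so a single field list drives both the header row and the exported values. A minimal usage sketch, assuming the queryset is non-empty:

    from assets.models import AuthBook
    from assets.serializers import AccountBackUpSerializer

    accounts = AuthBook.objects.all()
    # Header labels come from the fields declared in Meta.fields
    header_serializer = AccountBackUpSerializer(accounts.first())
    headers = {name: str(field.label)
               for name, field in header_serializer.fields.items()
               if name in AccountBackUpSerializer.Meta.fields}
    # Values are produced in a single bulk serialization pass
    rows = AccountBackUpSerializer(accounts, many=True).data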

View File

@@ -4,15 +4,16 @@ from openpyxl import Workbook
 from collections import defaultdict, OrderedDict
 from django.conf import settings
+from django.db.models import F
 from django.utils.translation import ugettext_lazy as _
 from rest_framework import serializers
-from assets.models import AuthBook
-from assets.serializers import AccountSecretSerializer
+from assets.models import AuthBook, SystemUser, Asset
+from assets.serializers import AccountBackUpSerializer
 from assets.notifications import AccountBackupExecutionTaskMsg
-from applications.models import Account
+from applications.models import Account, Application
 from applications.const import AppType
-from applications.serializers import AppAccountSecretSerializer
+from applications.serializers import AppAccountBackUpSerializer
 from users.models import User
 from common.utils import get_logger
 from common.utils.timezone import local_now_display
@@ -38,7 +39,7 @@ class BaseAccountHandler:
     @classmethod
     def get_header_fields(cls, serializer: serializers.Serializer):
         try:
-            backup_fields = getattr(serializer, 'Meta').fields_backup
+            backup_fields = getattr(serializer, 'Meta').fields
         except AttributeError:
             backup_fields = serializer.fields.keys()
         header_fields = {}
@@ -51,17 +52,41 @@ class BaseAccountHandler:
             header_fields[field] = str(v.label)
         return header_fields

+    @staticmethod
+    def load_auth(tp, value, system_user):
+        if value:
+            return value
+        if system_user:
+            return getattr(system_user, tp, '')
+        return ''
+
     @classmethod
-    def create_row(cls, account, serializer_cls, header_fields=None):
-        serializer = serializer_cls(account)
-        if not header_fields:
-            header_fields = cls.get_header_fields(serializer)
-        data = cls.unpack_data(serializer.data)
+    def replace_auth(cls, account, system_user_dict):
+        system_user = system_user_dict.get(account.systemuser_id)
+        account.username = cls.load_auth('username', account.username, system_user)
+        account.password = cls.load_auth('password', account.password, system_user)
+        account.private_key = cls.load_auth('private_key', account.private_key, system_user)
+        account.public_key = cls.load_auth('public_key', account.public_key, system_user)
+        return account
+
+    @classmethod
+    def create_row(cls, data, header_fields):
+        data = cls.unpack_data(data)
         row_dict = {}
         for field, header_name in header_fields.items():
-            row_dict[header_name] = str(data[field])
+            row_dict[header_name] = str(data.get(field, field))
         return row_dict

+    @classmethod
+    def add_rows(cls, data, header_fields, sheet):
+        data_map = defaultdict(list)
+        for i in data:
+            row = cls.create_row(i, header_fields)
+            if sheet not in data_map:
+                data_map[sheet].append(list(row.keys()))
+            data_map[sheet].append(list(row.values()))
+        return data_map
+

 class AssetAccountHandler(BaseAccountHandler):
     @staticmethod
@@ -72,22 +97,27 @@ class AssetAccountHandler(BaseAccountHandler):
         return filename

     @classmethod
-    def create_data_map(cls):
-        data_map = defaultdict(list)
+    def replace_account_info(cls, account, asset_dict, system_user_dict):
+        asset = asset_dict.get(account.asset_id)
+        account.ip = asset.ip if asset else ''
+        account.hostname = asset.hostname if asset else ''
+        account = cls.replace_auth(account, system_user_dict)
+        return account
+
+    @classmethod
+    def create_data_map(cls, system_user_dict):
         sheet_name = AuthBook._meta.verbose_name

-        accounts = AuthBook.get_queryset().select_related('systemuser')
-        if not accounts.first():
-            return data_map
-        header_fields = cls.get_header_fields(AccountSecretSerializer(accounts.first()))
+        assets = Asset.objects.only('id', 'hostname', 'ip')
+        asset_dict = {asset.id: asset for asset in assets}
+        accounts = AuthBook.objects.all()
+        if not accounts.exists():
+            return
+        header_fields = cls.get_header_fields(AccountBackUpSerializer(accounts.first()))
         for account in accounts:
-            account.load_auth()
-            row = cls.create_row(account, AccountSecretSerializer, header_fields)
-            if sheet_name not in data_map:
-                data_map[sheet_name].append(list(row.keys()))
-            data_map[sheet_name].append(list(row.values()))
+            cls.replace_account_info(account, asset_dict, system_user_dict)
+        data = AccountBackUpSerializer(accounts, many=True).data
+        data_map = cls.add_rows(data, header_fields, sheet_name)

         logger.info('\n\033[33m- 共收集 {} 条资产账号\033[0m'.format(accounts.count()))
         return data_map
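
add_rows returns a mapping of sheet name to rows, with the header labels as the first row and one list of stringified values per account. The xlsx writing itself is not part of this diff; a hypothetical sketch of dumping such a map with openpyxl (names are illustrative, not the project's own helpers):

    from openpyxl import Workbook

    def dump_data_map(data_map, path):
        # data_map: {sheet_name: [[header, ...], [value, ...], ...]} as built by add_rows();
        # assumed non-empty, since the handler skips empty maps before writing.
        wb = Workbook()
        wb.remove(wb.active)                      # drop the default empty worksheet
        for sheet_name, rows in data_map.items():
            ws = wb.create_sheet(str(sheet_name))  # verbose_name may be a lazy string
            for row in rows:
                ws.append(row)
        wb.save(path)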
@@ -101,18 +131,36 @@ class AppAccountHandler(BaseAccountHandler):
         return filename

     @classmethod
-    def create_data_map(cls):
-        data_map = defaultdict(list)
-        accounts = Account.get_queryset().select_related('systemuser')
-        for account in accounts:
-            account.load_auth()
-            app_type = account.type
+    def replace_account_info(cls, account, app_dict, system_user_dict):
+        app = app_dict.get(account.app_id)
+        account.type = app.type if app else ''
+        account.app_display = app.name if app else ''
+        account.category = app.category if app else ''
+        account = cls.replace_auth(account, system_user_dict)
+        return account
+
+    @classmethod
+    def create_data_map(cls, system_user_dict):
+        apps = Application.objects.only('id', 'type', 'name', 'category')
+        app_dict = {app.id: app for app in apps}
+        qs = Account.objects.all().annotate(app_type=F('app__type'))
+        if not qs.exists():
+            return
+        account_type_map = defaultdict(list)
+        for i in qs:
+            account_type_map[i.app_type].append(i)
+        data_map = {}
+        for app_type, accounts in account_type_map.items():
             sheet_name = AppType.get_label(app_type)
-            row = cls.create_row(account, AppAccountSecretSerializer)
-            if sheet_name not in data_map:
-                data_map[sheet_name].append(list(row.keys()))
-            data_map[sheet_name].append(list(row.values()))
-        logger.info('\n\033[33m- 共收集{}条应用账号\033[0m'.format(accounts.count()))
+            header_fields = cls.get_header_fields(AppAccountBackUpSerializer(tp=app_type))
+            if not accounts:
+                continue
+            for account in accounts:
+                cls.replace_account_info(account, app_dict, system_user_dict)
+            data = AppAccountBackUpSerializer(accounts, many=True, tp=app_type).data
+            data_map.update(cls.add_rows(data, header_fields, sheet_name))
+        logger.info('\n\033[33m- 共收集{}条应用账号\033[0m'.format(qs.count()))
         return data_map
@@ -137,12 +185,16 @@ class AccountBackupHandler:
         # Print task start date
         time_start = time.time()
         files = []
+        system_user_qs = SystemUser.objects.only(
+            'id', 'username', 'password', 'private_key', 'public_key'
+        )
+        system_user_dict = {i.id: i for i in system_user_qs}
         for account_type in self.execution.types:
             handler = handler_map.get(account_type)
             if not handler:
                 continue
-            data_map = handler.create_data_map()
+            data_map = handler.create_data_map(system_user_dict)
             if not data_map:
                 continue
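
The gain comes from replacing the per-account load_auth() round trips with dictionary lookups against system users, assets and applications fetched once up front. A hedged way to sanity-check the reduction locally is Django's query capture; AssetAccountHandler is assumed importable from this backup handler module, whose path is not shown in the diff:

    from django.db import connection
    from django.test.utils import CaptureQueriesContext

    from assets.models import SystemUser
    # AssetAccountHandler: imported from the backup handler module changed above (path omitted)

    system_user_qs = SystemUser.objects.only(
        'id', 'username', 'password', 'private_key', 'public_key'
    )
    system_user_dict = {i.id: i for i in system_user_qs}

    with CaptureQueriesContext(connection) as ctx:
        AssetAccountHandler.create_data_map(system_user_dict)

    # The count should stay roughly flat as the number of accounts grows,
    # instead of scaling with one query per account as before.
    print(len(ctx.captured_queries))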