mirror of https://github.com/jumpserver/jumpserver
Merge branch 'v3' of github.com:jumpserver/jumpserver into v3
commit 60d07cb3e1

@@ -12,7 +12,8 @@ from orgs.mixins import generics
 from assets import serializers
 from assets.models import Asset, Gateway
 from assets.tasks import (
-    update_assets_hardware_info_manual, test_assets_connectivity_manual,
+    test_assets_connectivity_manual,
+    update_assets_hardware_info_manual,
 )
 from assets.filters import NodeFilterBackend, LabelFilterBackend, IpInFilterBackend
 from ..mixin import NodeFilterMixin

@@ -78,12 +79,10 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
 class AssetsTaskMixin:
     def perform_assets_task(self, serializer):
         data = serializer.validated_data
-        action = data['action']
         assets = data.get('assets', [])
-        if action == "refresh":
+        if data['action'] == "refresh":
             task = update_assets_hardware_info_manual.delay(assets)
         else:
-            # action == 'test':
             task = test_assets_connectivity_manual.delay(assets)
         return task
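
perform_assets_task distinguishes only "refresh" from everything else: "refresh" queues hardware-info collection, any other action queues the connectivity test. A hedged example of the request body the serializer would validate (the key names come from the validated_data lookups above; the ids and the endpoint are hypothetical):

# Hypothetical payload handled by AssetsTaskMixin.perform_assets_task.
payload = {
    "action": "refresh",  # anything else, e.g. "test", triggers the connectivity task
    "assets": ["<asset-uuid-1>", "<asset-uuid-2>"],  # hypothetical asset ids
}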

@@ -4,11 +4,11 @@ from collections import namedtuple, defaultdict
 from django.core.exceptions import PermissionDenied

 from rest_framework import status
+from rest_framework.generics import get_object_or_404
 from rest_framework.serializers import ValidationError
 from rest_framework.response import Response
 from rest_framework.decorators import action
 from django.utils.translation import ugettext_lazy as _
-from django.shortcuts import get_object_or_404, Http404
 from django.db.models.signals import m2m_changed

 from common.const.http import POST

@@ -16,7 +16,7 @@ from common.exceptions import SomeoneIsDoingThis
 from common.const.signals import PRE_REMOVE, POST_REMOVE
 from common.mixins.api import SuggestionMixin
 from assets.models import Asset
-from common.utils import get_logger, get_object_or_none
+from common.utils import get_logger
 from common.tree import TreeNodeSerializer
 from orgs.mixins.api import OrgBulkModelViewSet
 from orgs.mixins import generics

@@ -339,7 +339,7 @@ class NodeTaskCreateApi(generics.CreateAPIView):

     def get_object(self):
         node_id = self.kwargs.get('pk')
-        node = get_object_or_none(self.model, id=node_id)
+        node = get_object_or_404(self.model, id=node_id)
         return node

     @staticmethod

@@ -361,8 +361,6 @@ class NodeTaskCreateApi(generics.CreateAPIView):
             task = self.refresh_nodes_cache()
             self.set_serializer_data(serializer, task)
             return
-        if node is None:
-            raise Http404()
         if action == "refresh":
             task = update_node_assets_hardware_info_manual.delay(node)
         else:

@@ -1,5 +1,5 @@
 from common.utils import get_logger
-from assets.const import AutomationTypes
+from assets.const import AutomationTypes, Connectivity
 from ..base.manager import BasePlaybookManager

 logger = get_logger(__name__)

@@ -8,13 +8,27 @@ logger = get_logger(__name__)
 class PingManager(BasePlaybookManager):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.host_asset_mapper = {}
+        self.host_asset_and_account_mapper = {}

     @classmethod
     def method_type(cls):
         return AutomationTypes.ping

-    def host_callback(self, host, asset=None, **kwargs):
-        super().host_callback(host, asset=asset, **kwargs)
-        self.host_asset_mapper[host['name']] = asset
+    def host_callback(self, host, asset=None, account=None, **kwargs):
+        super().host_callback(host, asset=asset, account=account, **kwargs)
+        self.host_asset_and_account_mapper[host['name']] = (asset, account)
         return host
+
+    def on_host_success(self, host, result):
+        asset, account = self.host_asset_and_account_mapper.get(host)
+        asset.set_connectivity(Connectivity.ok)
+        if not account:
+            return
+        account.set_connectivity(Connectivity.ok)
+
+    def on_host_error(self, host, error, result):
+        asset, account = self.host_asset_and_account_mapper.get(host)
+        asset.set_connectivity(Connectivity.failed)
+        if not account:
+            return
+        account.set_connectivity(Connectivity.failed)
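
The manager now keys one mapper by host name and stores (asset, account) pairs, so a single callback result updates connectivity on the asset and, when present, on the account. A minimal sketch of that bookkeeping, using stand-in objects rather than the real BasePlaybookManager or Connectivity constants (everything named Fake* here is hypothetical):

# Illustrative sketch only; stand-ins for the real jumpserver models.
class FakeRecord:
    def __init__(self, name):
        self.name = name
        self.connectivity = None

    def set_connectivity(self, value):
        self.connectivity = value


mapper = {}  # host name -> (asset, account), as in host_asset_and_account_mapper
asset, account = FakeRecord("web-1"), FakeRecord("root@web-1")
host = {"name": "web-1"}
mapper[host["name"]] = (asset, account)

# on_host_success / on_host_error both look up by host name and fan out:
found_asset, found_account = mapper.get("web-1")
found_asset.set_connectivity("ok")
if found_account:
    found_account.set_connectivity("ok")

print(asset.connectivity, account.connectivity)  # -> ok ok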

@@ -26,4 +26,16 @@ class Migration(migrations.Migration):
             name='type',
             field=models.CharField(choices=[('ping', 'Ping'), ('gather_facts', 'Gather facts'), ('push_account', 'Create account'), ('change_secret', 'Change secret'), ('verify_account', 'Verify account'), ('gather_accounts', 'Gather accounts')], max_length=16, verbose_name='Type'),
         ),
+        migrations.CreateModel(
+            name='PingAutomation',
+            fields=[
+                ('baseautomation_ptr',
+                 models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
+                                      primary_key=True, serialize=False, to='assets.baseautomation')),
+            ],
+            options={
+                'verbose_name': 'Ping asset',
+            },
+            bases=('assets.baseautomation',),
+        ),
     ]

@@ -4,3 +4,4 @@ from .push_account import *
 from .gather_facts import *
 from .gather_accounts import *
 from .verify_account import *
+from .ping import *

@@ -28,6 +28,15 @@ class BaseAutomation(CommonModelMixin, PeriodTaskModelMixin, OrgModelMixin):
     def __str__(self):
         return self.name + '@' + str(self.created_by)

+    @classmethod
+    def generate_unique_name(cls, name):
+        while True:
+            name = name + str(uuid.uuid4())[:8]
+            try:
+                cls.objects.get(name=name)
+            except cls.DoesNotExist:
+                return name
+
     def get_all_assets(self):
         nodes = self.nodes.all()
         node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
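
generate_unique_name keeps appending the first eight characters of a fresh UUID until the candidate name no longer matches an existing row. The same loop, sketched without the ORM (a plain set stands in for cls.objects, so this is illustrative only):

import uuid

existing_names = {"Test assets connectivity. "}  # stand-in for names already stored

def generate_unique_name(name):
    # Mirror of the classmethod above: extend the name until it is unused.
    while True:
        name = name + str(uuid.uuid4())[:8]
        if name not in existing_names:  # plays the role of DoesNotExist
            return name

print(generate_unique_name("Test assets connectivity. "))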

@@ -0,0 +1,15 @@
+from django.utils.translation import ugettext_lazy as _
+
+from assets.const import AutomationTypes
+from .base import BaseAutomation
+
+__all__ = ['PingAutomation']
+
+
+class PingAutomation(BaseAutomation):
+    def save(self, *args, **kwargs):
+        self.type = AutomationTypes.ping
+        super().save(*args, **kwargs)
+
+    class Meta:
+        verbose_name = _("Ping asset")

@@ -19,14 +19,12 @@ logger = get_logger(__file__)

 def update_asset_hardware_info_on_created(asset):
     logger.debug("Update asset `{}` hardware info".format(asset))
-    # Todo:
-    # update_assets_hardware_info_util.delay([asset])
+    update_assets_hardware_info_util.delay([asset])


 def test_asset_conn_on_created(asset):
     logger.debug("Test asset `{}` connectivity".format(asset))
-    # Todo:
-    # test_asset_connectivity_util.delay([asset])
+    test_asset_connectivity_util.delay([asset])


 @receiver(pre_save, sender=Node)

@@ -1,124 +1,50 @@
 # ~*~ coding: utf-8 ~*~
-from itertools import groupby
-from collections import defaultdict
 from celery import shared_task
 from django.utils.translation import gettext_noop

 from common.utils import get_logger
 from orgs.utils import org_aware_func, tmp_to_root_org
-from ..models import Asset, Connectivity, Account, Node
-from . import const
-from .utils import clean_ansible_task_hosts, group_asset_by_platform


 logger = get_logger(__file__)
 __all__ = [
-    'test_asset_connectivity_util', 'test_asset_connectivity_manual',
-    'test_node_assets_connectivity_manual', 'test_assets_connectivity_manual',
+    'test_asset_connectivity_util',
+    'test_assets_connectivity_manual',
+    'test_node_assets_connectivity_manual',
 ]


-# Todo: there may be a problem here
-def set_assets_accounts_connectivity(assets, results_summary):
-    asset_ids_ok = set()
-    asset_ids_failed = set()
-
-    asset_hostnames_ok = results_summary.get('contacted', {}).keys()
-
-    for asset in assets:
-        if asset.name in asset_hostnames_ok:
-            asset_ids_ok.add(asset.id)
-        else:
-            asset_ids_failed.add(asset.id)
-
-    Asset.bulk_set_connectivity(asset_ids_ok, Connectivity.ok)
-    Asset.bulk_set_connectivity(asset_ids_failed, Connectivity.failed)
-
-    accounts_ok = Account.objects.filter(asset_id__in=asset_ids_ok,)
-    accounts_failed = Account.objects.filter(asset_id__in=asset_ids_faile)
-
-    Account.bulk_set_connectivity(accounts_ok, Connectivity.ok)
-    Account.bulk_set_connectivity(accounts_failed, Connectivity.failed)
-
-
 @org_aware_func('assets')
 def test_asset_connectivity_util(assets, task_name=None):
-    from ops.utils import update_or_create_ansible_task
-
+    from assets.models import PingAutomation
     if task_name is None:
         task_name = gettext_noop("Test assets connectivity. ")

-    hosts = clean_ansible_task_hosts(assets)
-    if not hosts:
-        return {}
-    platform_hosts_map = {}
-    hosts_sorted = sorted(hosts, key=group_asset_by_platform)
-    platform_hosts = groupby(hosts_sorted, key=group_asset_by_platform)
-    for i in platform_hosts:
-        platform_hosts_map[i[0]] = list(i[1])
-
-    platform_tasks_map = {
-        "unixlike": const.PING_UNIXLIKE_TASKS,
-        "windows": const.PING_WINDOWS_TASKS
+    task_name = PingAutomation.generate_unique_name(task_name)
+    data = {
+        'name': task_name,
+        'comment': ', '.join([str(i) for i in assets])
     }
-    results_summary = dict(
-        contacted=defaultdict(dict), dark=defaultdict(dict), success=True
-    )
-    for platform, _hosts in platform_hosts_map.items():
-        if not _hosts:
-            continue
-        logger.debug("System user not has special auth")
-        tasks = platform_tasks_map.get(platform)
-        task, created = update_or_create_ansible_task(
-            task_name=task_name, hosts=_hosts, tasks=tasks,
-            pattern='all', options=const.TASK_OPTIONS, run_as_admin=True,
-        )
-        raw, summary = task.run()
-        success = summary.get('success', False)
-        contacted = summary.get('contacted', {})
-        dark = summary.get('dark', {})
-
-        results_summary['success'] &= success
-        results_summary['contacted'].update(contacted)
-        results_summary['dark'].update(dark)
-        continue
-    set_assets_accounts_connectivity(assets, results_summary)
-    return results_summary
-
-
-@shared_task(queue="ansible")
-def test_asset_connectivity_manual(asset_id):
-    asset = Asset.objects.filter(id=asset_id).first()
-    if not asset:
-        return
-    task_name = gettext_noop("Test assets connectivity: ") + str(asset)
-    summary = test_asset_connectivity_util([asset], task_name=task_name)
-
-    if summary.get('dark'):
-        return False, summary['dark']
-    else:
-        return True, ""
+    instance = PingAutomation.objects.create(**data)
+    instance.assets.add(*assets)
+    instance.execute()


 @shared_task(queue="ansible")
 def test_assets_connectivity_manual(asset_ids):
     from assets.models import Asset
     with tmp_to_root_org():
         assets = Asset.objects.filter(id__in=asset_ids)
-    task_name = gettext_noop("Test assets connectivity: ") + str([asset.name for asset in assets])
-    summary = test_asset_connectivity_util(assets, task_name=task_name)
-
-    if summary.get('dark'):
-        return False, summary['dark']
-    else:
-        return True, ""
+    task_name = gettext_noop("Test assets connectivity: ")
+    test_asset_connectivity_util(assets, task_name=task_name)


 @shared_task(queue="ansible")
 def test_node_assets_connectivity_manual(node_id):
     from assets.models import Node
     with tmp_to_root_org():
         node = Node.objects.get(id=node_id)

-    task_name = gettext_noop("Test if the assets under the node are connectable: ") + node.name
+    task_name = gettext_noop("Test if the assets under the node are connectable: ")
     assets = node.get_all_assets()
-    result = test_asset_connectivity_util(assets, task_name=task_name)
-    return result
+    test_asset_connectivity_util(assets, task_name=task_name)
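
The task body above no longer drives ansible inline; it records a PingAutomation and calls execute(), which hands the run to the PingManager shown earlier. A sketch of the same flow from a Django shell inside the project; every call used here appears in the diff, but the queryset and its contents are hypothetical:

# Assumes the project's Django environment; illustrative only.
from django.utils.translation import gettext_noop
from assets.models import Asset, PingAutomation
from orgs.utils import tmp_to_root_org

with tmp_to_root_org():
    assets = Asset.objects.all()[:2]  # any assets to ping

name = PingAutomation.generate_unique_name(gettext_noop("Test assets connectivity. "))
instance = PingAutomation.objects.create(
    name=name,
    comment=', '.join(str(a) for a in assets),
)
instance.assets.add(*assets)
instance.execute()  # hands off to the ping playbook manager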

@@ -1,149 +1,50 @@
 # -*- coding: utf-8 -*-
 #
-import json
-import re
-
 from celery import shared_task
-from django.utils.translation import ugettext as _, gettext_noop
+from django.utils.translation import gettext_noop

-from common.utils import (
-    capacity_convert, sum_capacity, get_logger
-)
+from common.utils import get_logger
 from orgs.utils import org_aware_func, tmp_to_root_org
-from . import const
-from ..models import Asset, Node
-from .utils import clean_ansible_task_hosts


 logger = get_logger(__file__)
-disk_pattern = re.compile(r'^hd|sd|xvd|vd|nv')
 __all__ = [
-    'update_assets_hardware_info_util', 'update_asset_hardware_info_manual',
-    'update_assets_hardware_info_period', 'update_node_assets_hardware_info_manual',
+    'update_assets_hardware_info_util',
+    'update_node_assets_hardware_info_manual',
+    'update_assets_hardware_info_manual',
 ]


-def set_assets_hardware_info(assets, result, **kwargs):
-    """
-    Using ops task run result, to update asset info
-
-    be a celery task also
-    :param assets:
-    :param result:
-    :param kwargs: {task_name: ""}
-    :return:
-    """
-    result_raw = result[0]
-    assets_updated = []
-    success_result = result_raw.get('ok', {})
-
-    for asset in assets:
-        hostname = asset.name
-        info = success_result.get(hostname, {})
-        info = info.get('setup', {}).get('ansible_facts', {})
-        if not info:
-            logger.error(_("Get asset info failed: {}").format(hostname))
-            continue
-        ___vendor = info.get('ansible_system_vendor', 'Unknown')
-        ___model = info.get('ansible_product_name', 'Unknown')
-        ___sn = info.get('ansible_product_serial', 'Unknown')
-
-        for ___cpu_model in info.get('ansible_processor', []):
-            if ___cpu_model.endswith('GHz') or ___cpu_model.startswith("Intel"):
-                break
-        else:
-            ___cpu_model = 'Unknown'
-        ___cpu_model = ___cpu_model[:48]
-        ___cpu_count = info.get('ansible_processor_count', 0)
-        ___cpu_cores = info.get('ansible_processor_cores', None) or \
-                       len(info.get('ansible_processor', []))
-        ___cpu_vcpus = info.get('ansible_processor_vcpus', 0)
-        ___memory = '%s %s' % capacity_convert(
-            '{} MB'.format(info.get('ansible_memtotal_mb'))
-        )
-        disk_info = {}
-        for dev, dev_info in info.get('ansible_devices', {}).items():
-            if disk_pattern.match(dev) and dev_info['removable'] == '0':
-                disk_info[dev] = dev_info['size']
-        ___disk_total = '%.1f %s' % sum_capacity(disk_info.values())
-        ___disk_info = json.dumps(disk_info)
-
-        # ___platform = info.get('ansible_system', 'Unknown')
-        ___os = info.get('ansible_distribution', 'Unknown')
-        ___os_version = info.get('ansible_distribution_version', 'Unknown')
-        ___os_arch = info.get('ansible_architecture', 'Unknown')
-        ___hostname_raw = info.get('ansible_hostname', 'Unknown')
-
-        for k, v in locals().items():
-            if k.startswith('___'):
-                setattr(asset, k.strip('_'), v)
-        asset.save()
-        assets_updated.append(asset)
-    return assets_updated
-
-
 @org_aware_func('assets')
 def update_assets_hardware_info_util(assets, task_name=None):
-    """
-    Using ansible api to update asset hardware info
-    :param asset_ids: asset seq
-    :param task_name: task_name running
-    :return: result summary ['contacted': {}, 'dark': {}]
-    """
-    from ops.utils import update_or_create_ansible_task
+    from assets.models import GatherFactsAutomation
     if task_name is None:
         task_name = gettext_noop("Update some assets hardware info. ")
-    tasks = const.UPDATE_ASSETS_HARDWARE_TASKS
-    hosts = clean_ansible_task_hosts(assets)
-    if not hosts:
-        return {}
-    task, created = update_or_create_ansible_task(
-        task_name, hosts=hosts, tasks=tasks,
-        pattern='all', options=const.TASK_OPTIONS,
-        run_as_admin=True,
-    )
-    result = task.run()
-    set_assets_hardware_info(assets, result)
-    return True
-
-
-@shared_task(queue="ansible")
-def update_asset_hardware_info_manual(asset_id):
-    with tmp_to_root_org():
-        asset = Asset.objects.filter(id=asset_id).first()
-        if not asset:
-            return
-    task_name = gettext_noop("Update asset hardware info: ") + str(asset.name)
-    update_assets_hardware_info_util([asset], task_name=task_name)
+    task_name = GatherFactsAutomation.generate_unique_name(task_name)
+    data = {
+        'name': task_name,
+        'comment': ', '.join([str(i) for i in assets])
+    }
+    instance = GatherFactsAutomation.objects.create(**data)
+    instance.assets.add(*assets)
+    instance.execute()


 @shared_task(queue="ansible")
 def update_assets_hardware_info_manual(asset_ids):
-    task_name = gettext_noop("Update assets hardware info: ") + str([asset.name for asset in assets])
-    update_assets_hardware_info_util(asset_ids, task_name=task_name)
-
-
-@shared_task(queue="ansible")
-def update_assets_hardware_info_period():
-    """
-    Update asset hardware period task
-    :return:
-    """
-    if not const.PERIOD_TASK_ENABLED:
-        logger.debug("Period task disabled, update assets hardware info pass")
-        return
+    from assets.models import Asset
+    with tmp_to_root_org():
+        assets = Asset.objects.filter(id__in=asset_ids)
+    task_name = gettext_noop("Update assets hardware info: ")
+    update_assets_hardware_info_util(assets, task_name=task_name)


 @shared_task(queue="ansible")
 def update_node_assets_hardware_info_manual(node_id):
     from assets.models import Node
     with tmp_to_root_org():
-        node = Node.objects.filter(id=node_id).first()
-        if not node:
-            return
+        node = Node.objects.get(id=node_id)

-    task_name = gettext_noop("Update node asset hardware information: ") + str(node.name)
+    task_name = gettext_noop("Update node asset hardware information: ")
     assets = node.get_all_assets()
-    result = update_assets_hardware_info_util(assets, task_name=task_name)
-    return result
+    update_assets_hardware_info_util(assets, task_name=task_name)

@@ -10,11 +10,13 @@ from orgs.utils import current_org
 from perms import serializers
 from perms import models
 from perms.utils.user_permission import UserGrantedAssetsQueryUtils
+from assets.serializers import AccountSerializer

 __all__ = [
     'AssetPermissionUserRelationViewSet', 'AssetPermissionUserGroupRelationViewSet',
     'AssetPermissionAssetRelationViewSet', 'AssetPermissionNodeRelationViewSet',
     'AssetPermissionAllAssetListApi', 'AssetPermissionAllUserListApi',
+    'AssetPermissionAccountListApi',
 ]

@@ -111,3 +113,16 @@ class AssetPermissionNodeRelationViewSet(RelationMixin):
         queryset = queryset.annotate(node_key=F('node__key'))
         return queryset
+
+
+class AssetPermissionAccountListApi(generics.ListAPIView):
+    serializer_class = AccountSerializer
+    filterset_fields = ("name", "username", "privileged", "version")
+    search_fields = filterset_fields
+
+    def get_queryset(self):
+        pk = self.kwargs.get("pk")
+        perm = get_object_or_404(models.AssetPermission, pk=pk)
+        accounts = perm.get_all_accounts()
+        return accounts

@@ -136,7 +136,7 @@ class AssetPermission(OrgModelMixin):
         q = Q(asset_id__in=asset_ids)
         if not self.is_perm_all_accounts:
             q &= Q(username__in=self.accounts)
-        accounts = Account.objects.filter(q)
+        accounts = Account.objects.filter(q).order_by('asset__name', 'name', 'username')
         if not flat:
             return accounts
         return accounts.values_list('id', flat=True)

@@ -3,9 +3,7 @@
 from rest_framework import serializers

 from common.drf.serializers import BulkSerializerMixin
-from assets.models import Asset, Node
-from perms.models import AssetPermission
 from users.models import User

 __all__ = [
     'AssetPermissionUserRelationSerializer',

@@ -84,6 +84,7 @@ permission_urlpatterns = [
     # Assets granted by the permission rule
     path('<uuid:pk>/assets/all/', api.AssetPermissionAllAssetListApi.as_view(), name='asset-permission-all-assets'),
     path('<uuid:pk>/users/all/', api.AssetPermissionAllUserListApi.as_view(), name='asset-permission-all-users'),
+    path('<uuid:pk>/accounts/', api.AssetPermissionAccountListApi.as_view(), name='asset-permission-accounts'),
 ]

 asset_permission_urlpatterns = [
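
AssetPermissionAccountListApi resolves the permission from the URL pk and returns perm.get_all_accounts(), which the AssetPermission hunk above now orders by asset name, account name and username; the new route wires it up as asset-permission-accounts. A hedged sketch of exercising it with DRF's test client; the URL prefix in front of permission_urlpatterns is not shown in this diff, so the full path below is an assumption:

# Hypothetical test-style usage; admin_user and perm are assumed existing objects.
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)

# '<uuid:pk>/accounts/' is the route added above; the '/api/v1/perms/...' prefix is assumed.
url = f'/api/v1/perms/asset-permissions/{perm.id}/accounts/'
resp = client.get(url, {'username': 'root'})  # username is one of the filterset_fields
assert resp.status_code == 200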

@@ -46,7 +46,7 @@ class DownloadUploadMixin:
         zp.extractall(extract_to)

         tmp_dir = os.path.join(extract_to, file.name.replace('.zip', ''))
-        files = ['manifest.yml', 'icon.png', 'i18n.yml']
+        files = ['manifest.yml', 'icon.png', 'i18n.yml', 'setup.yml']
         for name in files:
             path = os.path.join(tmp_dir, name)
             if not os.path.exists(path):