# -*- coding: utf-8 -*-
#
import re
import threading
import time
import uuid
from collections import defaultdict

from django.core.cache import cache
from django.db import models, transaction
from django.db.models import F, Q, Manager
from django.db.transaction import atomic
from django.utils.translation import gettext_lazy as _, gettext

from common.utils import get_logger, timeit
from common.utils.lock import DistributedLock
from orgs.mixins.models import OrgManager, JMSOrgBaseModel
from orgs.models import Organization
from orgs.utils import get_current_org, tmp_to_org, tmp_to_root_org

__all__ = ['Node', 'FamilyMixin', 'compute_parent_key', 'NodeQuerySet']
logger = get_logger(__name__)
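

# Node keys are colon-separated materialized paths: a node with key '1:2:5' has
# parent '1:2', ancestors '1:2' and '1', and every descendant key starts with
# '1:2:5:'. compute_parent_key('1') returns '' because a root has no parent.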
def compute_parent_key(key):
    try:
        return key[:key.rindex(':')]
    except ValueError:
        return ''


class NodeQuerySet(models.QuerySet):
    pass


class FamilyMixin:
    __parents = None
    __children = None
    __all_children = None
    is_node = True
    child_mark: int
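
    # Keep only the top-most keys: any key that is a descendant of an earlier
    # (sorted) key is dropped, e.g. ['1:2', '1:2:3', '1:4'] -> ['1:2', '1:4'].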
    @staticmethod
    def clean_children_keys(nodes_keys):
        sort_key = lambda k: [int(i) for i in k.split(':')]
        nodes_keys = sorted(list(nodes_keys), key=sort_key)

        nodes_keys_clean = []
        base_key = ''
        for key in nodes_keys:
            if key.startswith(base_key + ':'):
                continue
            nodes_keys_clean.append(key)
            base_key = key
        return nodes_keys_clean
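
    # Regex helpers over keys: the "all children" pattern matches every descendant
    # ('^<key>:...'), the "children" pattern only direct children ('^<key>:<num>$');
    # with_self=True additionally matches the key itself.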
    @classmethod
    def get_node_all_children_key_pattern(cls, key, with_self=True):
        pattern = r'^{0}:'.format(key)
        if with_self:
            pattern += r'|^{0}$'.format(key)
        return pattern

    @classmethod
    def get_nodes_children_key_pattern(cls, nodes, with_self=True):
        keys = [i.key for i in nodes]
        keys = cls.clean_children_keys(keys)
        patterns = [cls.get_node_all_children_key_pattern(key) for key in keys]
        patterns = '|'.join(patterns)
        return patterns

    @classmethod
    def get_nodes_all_children(cls, nodes, with_self=True):
        pattern = cls.get_nodes_children_key_pattern(nodes, with_self=with_self)
        if not pattern:
            # If pattern == '', key__iregex raises
            # (1139, "Got error 'empty (sub)expression' from regexp")
            return cls.objects.none()
        return Node.objects.filter(key__iregex=pattern)

    @classmethod
    def get_node_children_key_pattern(cls, key, with_self=True):
        pattern = r'^{0}:[0-9]+$'.format(key)
        if with_self:
            pattern += r'|^{0}$'.format(key)
        return pattern

    def get_children_key_pattern(self, with_self=False):
        return self.get_node_children_key_pattern(self.key, with_self=with_self)

    def get_all_children_pattern(self, with_self=False):
        return self.get_node_all_children_key_pattern(self.key, with_self=with_self)

    def is_children(self, other):
        children_pattern = other.get_children_key_pattern(with_self=False)
        return re.match(children_pattern, self.key)

    def get_children(self, with_self=False):
        q = Q(parent_key=self.key)
        if with_self:
            q |= Q(key=self.key)
        return Node.objects.filter(q)

    def get_all_children(self, with_self=False):
        q = Q(key__istartswith=f'{self.key}:')
        if with_self:
            q |= Q(key=self.key)
        return Node.objects.filter(q)

    @classmethod
    def get_ancestor_queryset(cls, queryset, with_self=True):
        parent_keys = set()
        for i in queryset:
            parent_keys.update(set(i.get_ancestor_keys(with_self=with_self)))
        queryset = queryset.model.objects.filter(key__in=list(parent_keys)).distinct()
        return queryset

    @property
    def children(self):
        return self.get_children(with_self=False)

    @property
    def all_children(self):
        return self.get_all_children(with_self=False)

    def create_child(self, value=None, _id=None):
        with atomic(savepoint=False):
            child_key = self.get_next_child_key()
            if value is None:
                value = child_key
            child = self.__class__.objects.create(
                id=_id, key=child_key, value=value
            )
            return child

    def get_or_create_child(self, value, _id=None):
        """
        :return: Node, bool (created)
        """
        children = self.get_children()
        exist = children.filter(value=value).exists()
        if exist:
            child = children.filter(value=value).first()
            created = False
        else:
            child = self.create_child(value, _id)
            created = True
        return child, created
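
    # child_mark is this node's counter for the next numeric child suffix; if that
    # suffix is already taken, fall back to max(existing numeric suffixes) + 1.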
    def get_valid_child_mark(self):
        key = "{}:{}".format(self.key, self.child_mark)
        if not self.__class__.objects.filter(key=key).exists():
            return self.child_mark
        children_keys = self.get_children().values_list('key', flat=True)
        children_keys_last = [key.split(':')[-1] for key in children_keys]
        children_keys_last = [int(k) for k in children_keys_last if k.strip().isdigit()]
        max_key_last = max(children_keys_last) if children_keys_last else 1
        return max_key_last + 1

    def get_next_child_key(self):
        child_mark = self.get_valid_child_mark()
        key = "{}:{}".format(self.key, child_mark)
        self.child_mark = child_mark + 1
        self.save()
        return key

    def get_next_child_preset_name(self):
        name = gettext("New node")
        values = [
            child.value[child.value.rfind(' '):]
            for child in self.get_children()
            if child.value.startswith(name)
        ]
        values = [int(value) for value in values if value.strip().isdigit()]
        count = max(values) + 1 if values else 1
        return '{} {}'.format(name, count)

    # Parents
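    # Ancestor keys are the key's proper prefixes, nearest first, e.g.
    # get_node_ancestor_keys('1:2:5') -> ['1:2', '1']
    # (['1:2:5', '1:2', '1'] with with_self=True).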
    @classmethod
    def get_node_ancestor_keys(cls, key, with_self=False):
        parent_keys = []
        key_list = key.split(":")
        if not with_self:
            key_list.pop()
        for i in range(len(key_list)):
            parent_keys.append(":".join(key_list))
            key_list.pop()
        return parent_keys

    def get_ancestor_keys(self, with_self=False):
        return self.get_node_ancestor_keys(self.key, with_self=with_self)

    @property
    def ancestors(self):
        return self.get_ancestors(with_self=False)

    def get_ancestors(self, with_self=False):
        ancestor_keys = self.get_ancestor_keys(with_self=with_self)
        return self.__class__.objects.filter(key__in=ancestor_keys)

    def compute_parent_key(self):
        return compute_parent_key(self.key)

    def is_parent(self, other):
        return other.is_children(self)

    @property
    def parent(self):
        if self.is_org_root():
            return self
        parent_key = self.parent_key
        return Node.objects.get(key=parent_key)
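
    # Re-parenting: give this node the next free child key under the new parent,
    # then rewrite the key prefix of every descendant inside one transaction.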
    @parent.setter
    def parent(self, parent):
        if not self.is_node:
            self.key = parent.key + ':fake'
            return
        children = self.get_all_children()
        old_key = self.key
        with transaction.atomic():
            self.key = parent.get_next_child_key()
            self.save()
            for child in children:
                child.key = child.key.replace(old_key, self.key, 1)
                child.save()

    def get_siblings(self, with_self=False):
        key = ':'.join(self.key.split(':')[:-1])
        pattern = r'^{}:[0-9]+$'.format(key)
        sibling = Node.objects.filter(key__regex=pattern)
        if not with_self:
            sibling = sibling.exclude(key=self.key)
        return sibling

    @classmethod
    def create_node_by_full_value(cls, full_value):
        if not full_value:
            return []
        nodes_family = full_value.split('/')
        nodes_family = [v for v in nodes_family if v]
        org_root = cls.org_root()
        if nodes_family[0] == org_root.value:
            nodes_family = nodes_family[1:]
        return cls.create_nodes_recurse(nodes_family, org_root)

    @classmethod
    def create_nodes_recurse(cls, values, parent=None):
        values = [v for v in values if v]
        if not values:
            return None
        if parent is None:
            parent = cls.org_root()
        value = values[0]
        child, created = parent.get_or_create_child(value=value)
        if len(values) == 1:
            return child
        return cls.create_nodes_recurse(values[1:], child)

    def get_family(self):
        ancestors = self.get_ancestors()
        children = self.get_all_children()
        return [*tuple(ancestors), self, *tuple(children)]
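

# Cache of "node key -> ids of all assets in that node's subtree", kept per org.
# Lookups go memory -> Django cache -> regenerate from the DB under a distributed
# lock, and each level is filled from the one below it.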
class NodeAllAssetsMappingMixin:
    # { org_id: { node_key: [ asset1_id, asset2_id ] } }
    orgid_nodekey_assetsid_mapping = defaultdict(dict)
    locks_for_get_mapping_from_cache = defaultdict(threading.Lock)

    @classmethod
    def get_lock(cls, org_id):
        lock = cls.locks_for_get_mapping_from_cache[str(org_id)]
        return lock

    @classmethod
    def get_node_all_asset_ids_mapping(cls, org_id):
        _mapping = cls.get_node_all_asset_ids_mapping_from_memory(org_id)
        if _mapping:
            return _mapping

        with cls.get_lock(org_id):
            _mapping = cls.get_node_all_asset_ids_mapping_from_cache_or_generate_to_cache(org_id)
            cls.set_node_all_asset_ids_mapping_to_memory(org_id, mapping=_mapping)
        return _mapping

    # from memory
    @classmethod
    def get_node_all_asset_ids_mapping_from_memory(cls, org_id):
        mapping = cls.orgid_nodekey_assetsid_mapping.get(org_id, {})
        return mapping

    @classmethod
    def set_node_all_asset_ids_mapping_to_memory(cls, org_id, mapping):
        cls.orgid_nodekey_assetsid_mapping[org_id] = mapping

    @classmethod
    def expire_node_all_asset_ids_memory_mapping(cls, org_id):
        org_id = str(org_id)
        cls.orgid_nodekey_assetsid_mapping.pop(org_id, None)

    @classmethod
    def expire_all_orgs_node_all_asset_ids_memory_mapping(cls):
        orgs = Organization.objects.all()
        org_ids = [str(org.id) for org in orgs]
        org_ids.append(Organization.ROOT_ID)

        for i in org_ids:
            cls.expire_node_all_asset_ids_memory_mapping(i)

    # get order: from memory -> (from cache -> to generate)
    @classmethod
    def get_node_all_asset_ids_mapping_from_cache_or_generate_to_cache(cls, org_id):
        mapping = cls.get_node_all_asset_ids_mapping_from_cache(org_id)
        if mapping:
            return mapping

        lock_key = f'KEY_LOCK_GENERATE_ORG_{org_id}_NODE_ALL_ASSET_ids_MAPPING'
        with DistributedLock(lock_key):
            # Use a lock without a timeout: if we block here, we are blocked on the
            # database, which means the database is busy, so no other thread should
            # run this generation and add to the load.

            _mapping = cls.get_node_all_asset_ids_mapping_from_cache(org_id)
            if _mapping:
                return _mapping

            _mapping = cls.generate_node_all_asset_ids_mapping(org_id)
            cache_key = cls._get_cache_key_for_node_all_asset_ids_mapping(org_id)
            cache.set(cache_key, _mapping, timeout=None)
            return _mapping

    @classmethod
    def get_node_all_asset_ids_mapping_from_cache(cls, org_id):
        cache_key = cls._get_cache_key_for_node_all_asset_ids_mapping(org_id)
        mapping = cache.get(cache_key)
        return mapping

    @classmethod
    def expire_node_all_asset_ids_cache_mapping(cls, org_id):
        cache_key = cls._get_cache_key_for_node_all_asset_ids_mapping(org_id)
        cache.delete(cache_key)

    @staticmethod
    def _get_cache_key_for_node_all_asset_ids_mapping(org_id):
        return 'ASSETS_ORG_NODE_ALL_ASSET_ids_MAPPING_{}'.format(org_id)
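
    # Build { node_key: set(asset_ids) } for one org: read every node's directly
    # linked asset ids, then propagate them onto each ancestor key (self included)
    # so an entry covers the node's whole subtree.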
    @classmethod
    @timeit
    def generate_node_all_asset_ids_mapping(cls, org_id):
        logger.info(f'Generate node asset mapping: org_id={org_id}')
        t1 = time.time()
        with tmp_to_org(org_id):
            node_ids_key = Node.objects.annotate(
                char_id=F('id')
            ).values_list('char_id', 'key')
            node_ids_key = [(str(node_id), node_key) for node_id, node_key in node_ids_key]
            node_id_ancestor_keys_mapping = {
                node_id: cls.get_node_ancestor_keys(node_key, with_self=True)
                for node_id, node_key in node_ids_key
            }

            # * Fetch all rows directly; filter(node__org_id=org_id) is slower at large scale
            nodes_asset_ids = cls.assets.through.objects.all() \
                .annotate(char_node_id=F('node_id')) \
                .annotate(char_asset_id=F('asset_id')) \
                .values_list('char_node_id', 'char_asset_id')

            nodeid_assetsid_mapping = defaultdict(set)
            for node_id, asset_id in nodes_asset_ids:
                node_id, asset_id = str(node_id), str(asset_id)
                nodeid_assetsid_mapping[node_id].add(asset_id)

        t2 = time.time()

        mapping = defaultdict(set)
        for node_id, node_key in node_ids_key:
            asset_ids = nodeid_assetsid_mapping[node_id]
            node_ancestor_keys = node_id_ancestor_keys_mapping[node_id]
            for ancestor_key in node_ancestor_keys:
                mapping[ancestor_key].update(asset_ids)

        t3 = time.time()
        logger.info('Generate asset nodes mapping, DB query: {:.2f}s, mapping: {:.2f}s'.format(t2 - t1, t3 - t2))
        return mapping


class NodeAssetsMixin(NodeAllAssetsMappingMixin):
    org_id: str
    key = ''
    id = None
    objects: Manager

    def get_all_assets(self):
        from .asset import Asset
        q = Q(nodes__key__startswith=f'{self.key}:') | Q(nodes__key=self.key)
        return Asset.objects.filter(q).distinct()

    def get_assets_amount(self):
        return self.get_all_assets().count()

    @classmethod
    def get_node_all_assets_by_key_v2(cls, key):
        # The original implementation was:
        # Asset.objects.filter(Q(nodes__key__startswith=f'{node.key}:') | Q(nodes__id=node.id))
        # but startswith stops the Asset index from being used in the join
        from .asset import Asset
        node_ids = cls.objects.filter(
            Q(key__startswith=f'{key}:') | Q(key=key)
        ).values_list('id', flat=True).distinct()
        assets = Asset.objects.filter(
            nodes__id__in=list(node_ids)
        ).distinct()
        return assets

    def get_assets(self):
        from .asset import Asset
        assets = Asset.objects.filter(nodes=self)
        return assets.distinct()

    def get_valid_assets(self):
        return self.get_assets().valid()

    def get_all_valid_assets(self):
        return self.get_all_assets().valid()

    @classmethod
    def get_nodes_all_asset_ids_by_keys(cls, nodes_keys):
        nodes = Node.objects.filter(key__in=nodes_keys)
        asset_ids = cls.get_nodes_all_assets(*nodes).values_list('id', flat=True)
        return asset_ids
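
    # Union of assets attached to the given nodes or to any of their descendants:
    # descendants are found with one key-prefix query, then assets are filtered by
    # the collected node ids.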
    @classmethod
    @timeit
    def get_nodes_all_assets(cls, *nodes, distinct=True):
        from .asset import Asset
        node_ids = set()
        descendant_node_query = Q()
        for n in nodes:
            node_ids.add(n.id)
            descendant_node_query |= Q(key__istartswith=f'{n.key}:')
        if descendant_node_query:
            _ids = Node.objects.order_by().filter(descendant_node_query).values_list('id', flat=True)
            node_ids.update(_ids)
        assets = Asset.objects.order_by().filter(nodes__id__in=node_ids)
        if distinct:
            assets = assets.distinct()
        return assets

    def get_all_asset_ids(self):
        asset_ids = self.get_all_asset_ids_by_node_key(org_id=self.org_id, node_key=self.key)
        return set(asset_ids)

    @classmethod
    def get_all_asset_ids_by_node_key(cls, org_id, node_key):
        org_id = str(org_id)
        nodekey_assetsid_mapping = cls.get_node_all_asset_ids_mapping(org_id)
        asset_ids = nodekey_assetsid_mapping.get(node_key, [])
        return set(asset_ids)
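

# Org root nodes have purely numeric keys ('1', '2', ...). Each organization is
# expected to have exactly one root; the default organization's root uses
# default_key '1'.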
class SomeNodesMixin:
    key = ''
    default_key = '1'
    empty_key = '-11'
    empty_value = _("empty")

    def is_default_node(self):
        return self.key == self.default_key

    def is_org_root(self):
        if self.key.isdigit():
            return True
        else:
            return False

    @classmethod
    def org_root(cls):
        # Using current_org here would cause an infinite loop in set_current_org
        ori_org = get_current_org()

        if ori_org and ori_org.is_default():
            return cls.default_node()

        if ori_org and ori_org.is_root():
            return cls.default_node()

        org_roots = cls.org_root_nodes()
        org_roots_length = len(org_roots)

        if org_roots_length == 1:
            root = org_roots[0]
            return root
        elif org_roots_length == 0:
            root = cls.create_org_root_node()
            return root
        else:
            error = 'Current org {} should have exactly 1 root node, found {}'.format(ori_org, org_roots_length)
            raise ValueError(error)

    @classmethod
    def default_node(cls):
        default_org = Organization.default()
        with tmp_to_org(default_org):
            defaults = {'value': default_org.name}
            obj, created = cls.objects.get_or_create(defaults=defaults, key=cls.default_key)
            return obj

    @classmethod
    def create_org_root_node(cls):
        ori_org = get_current_org()
        with transaction.atomic():
            key = cls.get_next_org_root_node_key()
            root = cls.objects.create(key=key, value=ori_org.name)
            return root

    @classmethod
    def get_next_org_root_node_key(cls):
        with tmp_to_root_org():
            org_nodes_roots = cls.org_root_nodes()
            org_nodes_roots_keys = org_nodes_roots.values_list('key', flat=True)
            if not org_nodes_roots_keys:
                org_nodes_roots_keys = ['1']
            max_key = max([int(k) for k in org_nodes_roots_keys])
            key = str(max_key + 1) if max_key > 0 else '2'
            return key

    @classmethod
    def org_root_nodes(cls):
        root_nodes = cls.objects.filter(parent_key='', key__regex=r'^[0-9]+$') \
            .exclude(key__startswith='-').order_by('key')
        return root_nodes


class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    key = models.CharField(unique=True, max_length=64, verbose_name=_("Key"))  # '1:1:1:1'
    value = models.CharField(max_length=128, verbose_name=_("Value"))
    full_value = models.CharField(max_length=4096, verbose_name=_('Full value'), default='')
    child_mark = models.IntegerField(default=0)
    date_create = models.DateTimeField(auto_now_add=True)
    parent_key = models.CharField(
        max_length=64, verbose_name=_("Parent key"), db_index=True, default=''
    )
    assets_amount = models.IntegerField(default=0)

    objects = OrgManager.from_queryset(NodeQuerySet)()
    is_node = True
    _parents = None

    class Meta:
        verbose_name = _("Node")
        ordering = ['parent_key', 'value']
        permissions = [
            ('match_node', _('Can match node')),
        ]

    def __str__(self):
        return self.full_value

    def __gt__(self, other):
        self_key = [int(k) for k in self.key.split(':')]
        other_key = [int(k) for k in other.key.split(':')]
        self_parent_key = self_key[:-1]
        other_parent_key = other_key[:-1]

        if self_parent_key and self_parent_key == other_parent_key:
            return self.value > other.value
        return self_key > other_key

    def __lt__(self, other):
        return not self.__gt__(other)

    @property
    def name(self):
        return self.value
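
    # full_value is the '/'-joined path of ancestor values plus this node's own
    # value, e.g. a node 'web' under root 'Default' gets '/Default/web'
    # (illustrative values).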
    def computed_full_value(self):
        # Do not call this when building lists; it queries the ancestors each time
        values = self.__class__.objects.filter(
            key__in=self.get_ancestor_keys()
        ).values_list('key', 'value')
        values = [v for k, v in sorted(values, key=lambda x: len(x[0]))]
        values.append(str(self.value))
        return '/' + '/'.join(values)

    @property
    def level(self):
        return len(self.key.split(':'))

    def as_tree_node(self):
        from common.tree import TreeNode
        name = '{} ({})'.format(self.value, self.assets_amount)
        data = {
            'id': self.key,
            'name': name,
            'title': name,
            'pId': self.parent_key,
            'isParent': True,
            'open': self.is_org_root(),
            'meta': {
                'data': {
                    "id": self.id,
                    "name": self.name,
                    "value": self.value,
                    "key": self.key,
                    "assets_amount": self.assets_amount,
                },
                'type': 'node'
            }
        }
        tree_node = TreeNode(**data)
        return tree_node

    def has_offspring_assets(self):
        # Has assets on itself or on any descendant node
        return self.get_all_assets().exists()

    def delete(self, using=None, keep_parents=False):
        if self.has_offspring_assets():
            return
        self.all_children.delete()
        return super().delete(using=using, keep_parents=keep_parents)
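
    # Refresh full_value across this node's subtree: nodes are processed in key
    # order (parents first), so each child reads its parent's already-updated
    # full_value from the in-memory mapper.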
    def update_child_full_value(self):
        nodes = self.get_all_children(with_self=True)
        sort_key_func = lambda n: [int(i) for i in n.key.split(':')]
        nodes_sorted = sorted(list(nodes), key=sort_key_func)
        nodes_mapper = {n.key: n for n in nodes_sorted}
        if not self.is_org_root():
            # If this is the org root, parent_key is '' and the parent is itself,
            # so that case is skipped; when updating self, its own parent is not
            # in the mapper and has to be added here.
            nodes_mapper.update({self.parent_key: self.parent})
        for node in nodes_sorted:
            parent = nodes_mapper.get(node.parent_key)
            if not parent:
                if node.parent_key:
                    logger.error(f'Node parent not found in mapper: {node.parent_key} {node.value}')
                continue
            node.full_value = parent.full_value + '/' + node.value
        self.__class__.objects.bulk_update(nodes, ['full_value'])

    def save(self, *args, **kwargs):
        self.full_value = self.computed_full_value()
        instance = super().save(*args, **kwargs)
        self.update_child_full_value()
        return instance