2016-09-03 11:05:50 +00:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
#
|
2017-12-18 14:48:19 +00:00
|
|
|
import re
|
2017-03-23 16:27:33 +00:00
|
|
|
from collections import OrderedDict
|
2016-09-13 17:08:26 +00:00
|
|
|
from itertools import chain
|
2016-09-15 03:19:36 +00:00
|
|
|
import logging
|
2016-10-26 11:10:14 +00:00
|
|
|
import datetime
|
2017-12-10 16:29:25 +00:00
|
|
|
import uuid
|
2018-06-01 08:22:52 +00:00
|
|
|
from functools import wraps
|
2019-07-11 10:12:14 +00:00
|
|
|
import time
|
2019-02-27 00:45:00 +00:00
|
|
|
import ipaddress
|
2019-12-05 07:09:25 +00:00
|
|
|
import psutil
|
2021-09-24 07:31:25 +00:00
|
|
|
import platform
|
|
|
|
import os
|
|
|
|
|
|
|
|
from django.conf import settings
|
2016-11-06 16:39:26 +00:00
|
|
|
|
2019-08-16 08:33:05 +00:00
|
|
|
# Loose UUID shape: 8-4-4-4-12 groups of word characters (used via .match in is_uuid;
# NOTE(review): \w is broader than hex and the pattern is not end-anchored).
UUID_PATTERN = re.compile(r'\w{8}(-\w{4}){3}-\w{12}')
|
2019-02-27 00:45:00 +00:00
|
|
|
# Placeholder for an IPIP geo-database handle; stays None until loaded elsewhere
# (nothing in this file populates it).
ipip_db = None
|
2016-10-14 16:49:59 +00:00
|
|
|
|
2016-09-10 13:08:10 +00:00
|
|
|
|
2016-09-13 17:08:26 +00:00
|
|
|
def combine_seq(s1, s2, callback=None):
    """
    Lazily concatenate two iterables, optionally mapping `callback` over
    the result. Returns [] when either argument is not iterable.
    """
    if not (hasattr(s1, '__iter__') and hasattr(s2, '__iter__')):
        return []

    merged = chain(s1, s2)
    return map(callback, merged) if callback else merged
|
|
|
|
|
|
|
|
|
2019-12-16 08:53:29 +00:00
|
|
|
def get_logger(name=''):
    """
    Return the application logger 'jumpserver.<name>'.

    A file path may be passed as `name`; it is reduced to the bare module
    name (basename without the '.py' suffix).
    """
    if '/' in name:
        stem = os.path.basename(name)
        name = stem.replace('.py', '')
    return logging.getLogger('jumpserver.%s' % name)
|
2016-09-18 16:07:52 +00:00
|
|
|
|
|
|
|
|
2019-12-16 08:53:29 +00:00
|
|
|
def get_syslogger(name=''):
    """Return the syslog logger namespaced as 'syslog.<name>'."""
    return logging.getLogger('syslog.%s' % name)
|
2019-08-21 12:27:21 +00:00
|
|
|
|
|
|
|
|
2016-10-26 11:10:14 +00:00
|
|
|
def timesince(dt, since='', default="just now"):
    """
    Return a string representing "time since" `dt`, e.g. "3 days" or "5 hours".

    :param dt: the past datetime being described
    :param since: reference "now"; defaults to utcnow() when falsy
    :param default: returned when no whole period has elapsed
    :return: e.g. "1 week", "2 years", or `default`
    """
    if not since:
        since = datetime.datetime.utcnow()

    diff = since - dt

    # Integer division (//) so `period` is an int and `period == 1` can hold;
    # true division yielded floats (e.g. 1.43), so the singular form was
    # never chosen ("1 weeks").
    periods = (
        (diff.days // 365, "year", "years"),
        (diff.days // 30, "month", "months"),
        (diff.days // 7, "week", "weeks"),
        (diff.days, "day", "days"),
        (diff.seconds // 3600, "hour", "hours"),
        (diff.seconds // 60, "minute", "minutes"),
        (diff.seconds, "second", "seconds"),
    )

    # First non-zero period (largest unit) wins.
    for period, singular, plural in periods:
        if period:
            return "%d %s" % (period, singular if period == 1 else plural)
    return default
|
|
|
|
|
2016-11-01 09:21:16 +00:00
|
|
|
|
2016-11-10 08:59:50 +00:00
|
|
|
def setattr_bulk(seq, key, value):
    """
    Lazily set attribute `key` to `value` on each object in `seq`.

    Returns a map iterator yielding the objects; attributes are assigned
    only as the iterator is consumed.
    """
    def _assign(item):
        setattr(item, key, value)
        return item

    return map(_assign, seq)
|
|
|
|
|
|
|
|
|
2018-04-07 16:16:37 +00:00
|
|
|
def set_or_append_attr_bulk(seq, key, value):
    """
    Set attribute `key` to `value` on each object in `seq`; when an object
    already has a truthy value, prepend `value` to it ("value old").

    Fixed: the original mutated the shared `value` inside the loop, so one
    object's previous value leaked into every subsequent object.
    """
    for obj in seq:
        existing = getattr(obj, key, None)
        # Computed per object so nothing accumulates across iterations.
        new_value = value + " " + existing if existing else value
        setattr(obj, key, new_value)
|
|
|
|
|
|
|
|
|
2017-03-15 16:19:47 +00:00
|
|
|
def capacity_convert(size, expect='auto', rate=1000):
    """
    Convert a capacity string into a number in the desired unit.

    :param size: e.g. '100MB', '1G'
    :param expect: target unit ('K', 'M', 'G', 'T', ...) or 'auto'
    :param rate: base multiplier, 1000 by default (may be 1024)
    :return: (converted_value, unit) tuple
    """
    rate_mapping = OrderedDict([
        ('K', rate),
        ('KB', rate),
        ('M', rate ** 2),
        ('MB', rate ** 2),
        ('G', rate ** 3),
        ('GB', rate ** 3),
        ('T', rate ** 4),
        ('TB', rate ** 4),
    ])

    # Normalize to a base quantity by matching the size suffix.
    std_size = 0  # To KB
    for suffix, multiplier in rate_mapping.items():
        if not size.endswith(suffix):
            continue
        try:
            std_size = float(size.strip(suffix).strip()) * multiplier
        except ValueError:
            pass

    # 'auto' picks the largest unit keeping the value in [1, rate),
    # capping at 'T'.
    if expect == 'auto':
        for suffix, multiplier in rate_mapping.items():
            if rate > std_size / multiplier >= 1 or suffix == "T":
                expect = suffix
                break

    if expect not in rate_mapping:
        expect = 'K'

    return std_size / rate_mapping[expect], expect
|
|
|
|
|
|
|
|
|
|
|
|
def sum_capacity(cap_list):
|
|
|
|
total = 0
|
|
|
|
for cap in cap_list:
|
|
|
|
size, _ = capacity_convert(cap, expect='K')
|
|
|
|
total += size
|
|
|
|
total = '{} K'.format(total)
|
|
|
|
return capacity_convert(total, expect='auto')
|
|
|
|
|
|
|
|
|
2017-12-10 16:29:25 +00:00
|
|
|
def get_short_uuid_str():
    """Return the last (12-hex-char) segment of a random UUID4 string."""
    return str(uuid.uuid4()).rsplit('-', 1)[-1]
|
|
|
|
|
|
|
|
|
2018-04-10 12:45:01 +00:00
|
|
|
def is_uuid(seq):
    """
    True for a uuid.UUID instance, a UUID-shaped string, or a list/tuple
    whose members all satisfy the same test (recursively).
    """
    if isinstance(seq, uuid.UUID):
        return True
    if isinstance(seq, str):
        return bool(UUID_PATTERN.match(seq))
    if isinstance(seq, (list, tuple)):
        return all(is_uuid(member) for member in seq)
    return False
|
2017-12-18 14:48:19 +00:00
|
|
|
|
|
|
|
|
2018-09-03 03:24:25 +00:00
|
|
|
def get_request_ip(request):
    """
    Return the client IP for a Django request: the first X-Forwarded-For
    hop when present, otherwise REMOTE_ADDR (or '' if neither exists).
    """
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR', '').split(',')
    if forwarded and forwarded[0]:
        return forwarded[0]
    return request.META.get('REMOTE_ADDR', '')
|
|
|
|
|
|
|
|
|
2019-11-05 10:46:29 +00:00
|
|
|
def get_request_ip_or_data(request):
    """
    Prefer an explicit 'remote_addr' in the request payload (when the
    request has a `data` attribute); fall back to the connection IP.
    """
    ip = request.data.get('remote_addr', '') if hasattr(request, 'data') else ''
    return ip or get_request_ip(request)
|
|
|
|
|
|
|
|
|
2020-09-29 07:31:45 +00:00
|
|
|
def get_request_user_agent(request):
    """Return the request's User-Agent header, or '' when absent."""
    return request.META.get('HTTP_USER_AGENT', '')
|
|
|
|
|
|
|
|
|
2019-02-27 00:45:00 +00:00
|
|
|
def validate_ip(ip):
    """True when `ip` parses as a valid IPv4 or IPv6 address."""
    try:
        ipaddress.ip_address(ip)
    except ValueError:
        return False
    return True
|
2018-10-23 11:22:18 +00:00
|
|
|
|
|
|
|
|
2018-06-01 08:22:52 +00:00
|
|
|
def with_cache(func):
    """
    Decorator caching the function's first truthy result for its lifetime.

    NOTE: the cache key is fixed per function — call arguments are ignored,
    so every later call returns the first cached result; falsy results are
    not cached (the function runs again).
    """
    _store = {}
    _key = "_{}.{}".format(func.__module__, func.__name__)

    @wraps(func)
    def wrapper(*args, **kwargs):
        hit = _store.get(_key)
        if hit:
            return hit
        result = func(*args, **kwargs)
        _store[_key] = result
        return result

    return wrapper
|
2018-07-20 09:49:47 +00:00
|
|
|
|
|
|
|
|
2019-07-11 10:12:14 +00:00
|
|
|
# Module-level logger used by the helpers below (e.g. timeit, Time.print).
logger = get_logger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
def timeit(func):
    """
    Decorator logging wall-clock duration (in ms) of each call at DEBUG level.

    Fixed: applies functools.wraps so the wrapped function keeps its name,
    docstring and other metadata (consistent with with_cache above).
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # Prefer the qualified name (covers methods); fall back to the
        # object itself for callables without either attribute.
        name = func
        for attr in ('__qualname__', '__name__'):
            if hasattr(func, attr):
                name = getattr(func, attr)
                break

        logger.debug("Start call: {}".format(name))
        started = time.time()
        result = func(*args, **kwargs)
        elapsed_ms = (time.time() - started) * 1000
        msg = "End call {}, using: {:.1f}ms".format(name, elapsed_ms)
        logger.debug(msg)
        return result

    return wrapper
|
2019-09-18 14:06:46 +00:00
|
|
|
|
|
|
|
|
|
|
|
def group_obj_by_count(objs, count=50):
    """Split sequence `objs` into consecutive chunks of at most `count` items."""
    return [objs[start:start + count] for start in range(0, len(objs), count)]
|
|
|
|
|
|
|
|
|
|
|
|
def dict_get_any(d, keys):
    """Return the first truthy value found in `d` among `keys`, else None."""
    return next((d[k] for k in keys if d.get(k)), None)
|
2019-09-19 09:27:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
class lazyproperty:
    """
    Non-data descriptor: computes the value on first access, then caches it
    as an instance attribute of the same name, shadowing the descriptor.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls):
        # Accessed on the class itself — return the descriptor object.
        if instance is None:
            return self
        result = self.func(instance)
        # Shadow this descriptor; later reads hit the instance dict directly.
        setattr(instance, self.func.__name__, result)
        return result
|
2019-12-05 07:09:25 +00:00
|
|
|
|
|
|
|
|
2021-08-06 11:16:18 +00:00
|
|
|
def get_disk_usage(path):
    """Percent of the filesystem at `path` currently in use."""
    usage = psutil.disk_usage(path=path)
    return usage.percent
|
2021-02-05 05:29:29 +00:00
|
|
|
|
|
|
|
|
2021-07-30 07:42:06 +00:00
|
|
|
def get_cpu_load():
    """1-minute load average normalized per CPU core, rounded to 2 decimals."""
    load_1min = psutil.getloadavg()[0]
    per_core = load_1min / psutil.cpu_count()
    return float('%.2f' % per_core)
|
|
|
|
|
|
|
|
|
2021-11-26 08:46:45 +00:00
|
|
|
def get_docker_mem_usage_if_limit():
    """
    Return the container memory usage percent when a cgroup (v1) memory
    limit is in effect; return None when no real limit applies, the cgroup
    files are absent, or any error occurs.
    """
    try:
        with open('/sys/fs/cgroup/memory/memory.limit_in_bytes') as f:
            limit_in_bytes = int(f.readline())
            total = psutil.virtual_memory().total
            # A "limit" at or above host RAM means no real cgroup limit is set.
            if limit_in_bytes >= total:
                raise ValueError('Not limit')

        with open('/sys/fs/cgroup/memory/memory.usage_in_bytes') as f:
            usage_in_bytes = int(f.readline())

        with open('/sys/fs/cgroup/memory/memory.stat') as f:
            inactive_file = 0
            for line in f:
                # Prefer the hierarchical total; stop once found.
                if line.startswith('total_inactive_file'):
                    name, inactive_file = line.split()
                    break

                # Fallback value; keep scanning in case the
                # 'total_inactive_file' line appears later.
                if line.startswith('inactive_file'):
                    name, inactive_file = line.split()
                    continue

            inactive_file = int(inactive_file)
            # Inactive page-cache pages are reclaimable, so exclude them
            # from the usage figure before computing the percentage.
            return ((usage_in_bytes - inactive_file) / limit_in_bytes) * 100

    # Broad catch is deliberate: best-effort probe, absent files / parse
    # errors all mean "no limited usage available".
    except Exception as e:
        return None
|
|
|
|
|
|
|
|
|
2021-08-06 11:16:18 +00:00
|
|
|
def get_memory_usage():
    """Memory usage percent, honoring a docker cgroup limit when one is set."""
    limited_usage = get_docker_mem_usage_if_limit()
    if limited_usage is None:
        return psutil.virtual_memory().percent
    return limited_usage
|
|
|
|
|
|
|
|
|
2021-02-05 05:29:29 +00:00
|
|
|
class Time:
    """Collect labelled timestamps and log the deltas between them at DEBUG."""

    def __init__(self):
        self._timestamps = []
        self._msgs = []

    def begin(self):
        """Record the starting timestamp."""
        self._timestamps.append(time.time())

    def time(self, msg):
        """Record a timestamp labelled with `msg`."""
        self._timestamps.append(time.time())
        self._msgs.append(msg)

    def print(self):
        """Log each labelled interval since the previous timestamp."""
        previous, *rest = self._timestamps
        for stamp, msg in zip(rest, self._msgs):
            logger.debug(f'TIME_IT: {msg} {stamp-previous}')
            previous = stamp
|
2021-02-07 02:15:39 +00:00
|
|
|
|
|
|
|
|
2021-10-18 03:25:39 +00:00
|
|
|
def bulk_get(d, keys, default=None):
    """Return a list of `d`'s values for `keys`, using `default` for misses."""
    return [d.get(key, default) for key in keys]
|
2021-07-27 08:06:00 +00:00
|
|
|
|
|
|
|
|
|
|
|
def unique(objects, key=None):
    """
    De-duplicate `objects` preserving first-seen order; `key` extracts the
    identity value (defaults to the object itself).
    """
    if key is None:
        key = lambda item: item

    seen = OrderedDict()
    for item in objects:
        # setdefault keeps the first object observed for each identity.
        seen.setdefault(key(item), item)
    return list(seen.values())
|
2021-09-24 07:31:25 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_file_by_arch(dir, filename):
    """
    Build BASE_DIR/<dir>/<System>/<machine>/<filename> for the current
    host's OS name and CPU architecture.
    """
    system_name = platform.system()
    machine = platform.machine()
    return os.path.join(settings.BASE_DIR, dir, system_name, machine, filename)
|
2022-03-30 11:07:49 +00:00
|
|
|
|
|
|
|
|
|
|
|
def pretty_string(data: str, max_length=128, ellipsis_str='...'):
    """
    Shorten `data` to at most `max_length` characters by keeping the head
    and tail and replacing the middle with `ellipsis_str`.

    params:
        data: abcdefgh
        max_length: 7
        ellipsis_str: ...
    return:
        ab...gh
    """
    # Fixed: strings of exactly max_length already fit and pass through
    # unchanged (the original used '<' and pointlessly mangled them).
    if len(data) <= max_length:
        return data
    remain_length = max_length - len(ellipsis_str)
    half = remain_length // 2
    if half <= 1:
        # Too short to show both ends meaningfully; plain truncation.
        return data[:max_length]
    start = data[:half]
    end = data[-half:]
    return f'{start}{ellipsis_str}{end}'
|
2022-06-23 05:52:28 +00:00
|
|
|
|
|
|
|
|
|
|
|
def group_by_count(it, count):
    """Split sequence `it` into consecutive chunks of at most `count` items."""
    chunks = []
    for start in range(0, len(it), count):
        chunks.append(it[start:start + count])
    return chunks
|