mirror of https://github.com/jumpserver/jumpserver
feat: 解决冲突
commit
85aad7ba62
|
@ -0,0 +1,3 @@
|
|||
[settings]
|
||||
line_length=120
|
||||
known_first_party=common,users,assets,perms,authentication,jumpserver,notification,ops,orgs,rbac,settings,terminal,tickets
|
117
Dockerfile
117
Dockerfile
|
@ -1,65 +1,78 @@
|
|||
FROM python:3.8-slim as stage-build
|
||||
ARG TARGETARCH
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
ADD . .
|
||||
RUN cd utils && bash -ixeu build.sh
|
||||
|
||||
FROM python:3.8-slim
|
||||
ARG TARGETARCH
|
||||
MAINTAINER JumpServer Team <ibuler@qq.com>
|
||||
|
||||
ARG BUILD_DEPENDENCIES=" \
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
|
||||
ARG DEPENDENCIES=" \
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
openssh-client \
|
||||
sshpass"
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libjpeg-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
openssh-client \
|
||||
sshpass"
|
||||
|
||||
ARG TOOLS=" \
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iproute2 \
|
||||
iputils-ping \
|
||||
locales \
|
||||
procps \
|
||||
redis-tools \
|
||||
telnet \
|
||||
vim \
|
||||
unzip \
|
||||
wget"
|
||||
ca-certificates \
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iputils-ping \
|
||||
locales \
|
||||
procps \
|
||||
redis-tools \
|
||||
telnet \
|
||||
vim \
|
||||
unzip \
|
||||
wget"
|
||||
|
||||
RUN sed -i 's@http://.*.debian.org@http://mirrors.ustc.edu.cn@g' /etc/apt/sources.list \
|
||||
ARG APT_MIRROR=http://mirrors.ustc.edu.cn
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& rm -f /etc/apt/apt.conf.d/docker-clean \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& apt-get update \
|
||||
&& apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${TOOLS} \
|
||||
&& localedef -c -f UTF-8 -i zh_CN zh_CN.UTF-8 \
|
||||
&& cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& mkdir -p /root/.ssh/ \
|
||||
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
|
||||
&& sed -i "s@# alias l@alias l@g" ~/.bashrc \
|
||||
&& echo "set mouse-=a" > ~/.vimrc \
|
||||
&& echo "no" | dpkg-reconfigure dash \
|
||||
&& echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
|
||||
&& sed -i "s@# export @export @g" ~/.bashrc \
|
||||
&& sed -i "s@# alias @alias @g" ~/.bashrc \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
ARG TARGETARCH
|
||||
ARG ORACLE_LIB_MAJOR=19
|
||||
ARG ORACLE_LIB_MINOR=10
|
||||
ENV ORACLE_FILE="instantclient-basiclite-linux.${TARGETARCH:-amd64}-${ORACLE_LIB_MAJOR}.${ORACLE_LIB_MINOR}.0.0.0dbru.zip"
|
||||
ARG DOWNLOAD_URL=https://download.jumpserver.org
|
||||
|
||||
RUN mkdir -p /opt/oracle/ \
|
||||
&& cd /opt/oracle/ \
|
||||
&& wget https://download.jumpserver.org/files/oracle/${ORACLE_FILE} \
|
||||
&& unzip instantclient-basiclite-linux.${TARGETARCH-amd64}-19.10.0.0.0dbru.zip \
|
||||
&& mv instantclient_${ORACLE_LIB_MAJOR}_${ORACLE_LIB_MINOR} instantclient \
|
||||
&& echo "/opt/oracle/instantclient" > /etc/ld.so.conf.d/oracle-instantclient.conf \
|
||||
&& wget ${DOWNLOAD_URL}/public/instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip \
|
||||
&& unzip instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip \
|
||||
&& sh -c "echo /opt/oracle/instantclient_19_10 > /etc/ld.so.conf.d/oracle-instantclient.conf" \
|
||||
&& ldconfig \
|
||||
&& rm -f ${ORACLE_FILE}
|
||||
&& rm -f instantclient-basiclite-linux.${TARGETARCH}-19.10.0.0.0.zip
|
||||
|
||||
WORKDIR /tmp/build
|
||||
COPY ./requirements ./requirements
|
||||
|
@ -68,27 +81,25 @@ ARG PIP_MIRROR=https://pypi.douban.com/simple
|
|||
ENV PIP_MIRROR=$PIP_MIRROR
|
||||
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR
|
||||
# 因为以 jms 或者 jumpserver 开头的 mirror 上可能没有
|
||||
RUN pip install --upgrade pip==20.2.4 setuptools==49.6.0 wheel==0.34.2 -i ${PIP_MIRROR} \
|
||||
&& pip install --no-cache-dir $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install --no-cache-dir -r requirements/requirements.txt -i ${PIP_MIRROR} \
|
||||
&& rm -rf ~/.cache/pip
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
ENV ANSIBLE_LIBRARY=/opt/jumpserver/apps/ops/ansible/modules
|
||||
ADD . .
|
||||
RUN cd utils \
|
||||
&& bash -ixeu build.sh \
|
||||
&& mv ../release/jumpserver /opt/jumpserver \
|
||||
&& rm -rf /tmp/build \
|
||||
&& echo > /opt/jumpserver/config.yml
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
set -ex \
|
||||
&& pip config set global.index-url ${PIP_MIRROR} \
|
||||
&& pip install --upgrade pip \
|
||||
&& pip install --upgrade setuptools wheel \
|
||||
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install -r requirements/requirements.txt
|
||||
|
||||
COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
|
||||
RUN echo > /opt/jumpserver/config.yml \
|
||||
&& rm -rf /tmp/build
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
VOLUME /opt/jumpserver/data
|
||||
VOLUME /opt/jumpserver/logs
|
||||
|
||||
ENV LANG=zh_CN.UTF-8
|
||||
ENV ANSIBLE_LIBRARY=/opt/jumpserver/apps/ops/ansible/modules
|
||||
|
||||
EXPOSE 8070
|
||||
EXPOSE 8080
|
||||
|
|
|
@ -0,0 +1,96 @@
|
|||
FROM python:3.8-slim as stage-build
|
||||
ARG TARGETARCH
|
||||
|
||||
ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
ADD . .
|
||||
RUN cd utils && bash -ixeu build.sh
|
||||
|
||||
FROM python:3.8-slim
|
||||
ARG TARGETARCH
|
||||
MAINTAINER JumpServer Team <ibuler@qq.com>
|
||||
|
||||
ARG BUILD_DEPENDENCIES=" \
|
||||
g++ \
|
||||
make \
|
||||
pkg-config"
|
||||
|
||||
ARG DEPENDENCIES=" \
|
||||
default-libmysqlclient-dev \
|
||||
freetds-dev \
|
||||
libpq-dev \
|
||||
libffi-dev \
|
||||
libjpeg-dev \
|
||||
libldap2-dev \
|
||||
libsasl2-dev \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
libaio-dev \
|
||||
openssh-client \
|
||||
sshpass"
|
||||
|
||||
ARG TOOLS=" \
|
||||
ca-certificates \
|
||||
curl \
|
||||
default-mysql-client \
|
||||
iputils-ping \
|
||||
locales \
|
||||
netcat \
|
||||
redis-server \
|
||||
telnet \
|
||||
vim \
|
||||
unzip \
|
||||
wget"
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
set -ex \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
&& apt-get update \
|
||||
&& apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${DEPENDENCIES} \
|
||||
&& apt-get -y install --no-install-recommends ${TOOLS} \
|
||||
&& mkdir -p /root/.ssh/ \
|
||||
&& echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
|
||||
&& echo "set mouse-=a" > ~/.vimrc \
|
||||
&& echo "no" | dpkg-reconfigure dash \
|
||||
&& echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
|
||||
&& sed -i "s@# export @export @g" ~/.bashrc \
|
||||
&& sed -i "s@# alias @alias @g" ~/.bashrc \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /tmp/build
|
||||
COPY ./requirements ./requirements
|
||||
|
||||
ARG PIP_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_MIRROR=$PIP_MIRROR
|
||||
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
|
||||
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
set -ex \
|
||||
&& pip config set global.index-url ${PIP_MIRROR} \
|
||||
&& pip install --upgrade pip \
|
||||
&& pip install --upgrade setuptools wheel \
|
||||
&& pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-36.0.1-cp38-cp38-linux_loongarch64.whl \
|
||||
&& pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp38-cp38-linux_loongarch64.whl \
|
||||
&& pip install $(grep 'PyNaCl' requirements/requirements.txt) \
|
||||
&& GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true pip install grpcio \
|
||||
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
|
||||
&& pip install -r requirements/requirements.txt
|
||||
|
||||
COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
|
||||
RUN echo > /opt/jumpserver/config.yml \
|
||||
&& rm -rf /tmp/build
|
||||
|
||||
WORKDIR /opt/jumpserver
|
||||
VOLUME /opt/jumpserver/data
|
||||
VOLUME /opt/jumpserver/logs
|
||||
|
||||
ENV LANG=zh_CN.UTF-8
|
||||
|
||||
EXPOSE 8070
|
||||
EXPOSE 8080
|
||||
ENTRYPOINT ["./entrypoint.sh"]
|
|
@ -0,0 +1,52 @@
|
|||
from rest_framework.views import APIView
|
||||
from rest_framework import status
|
||||
from django.http.response import JsonResponse
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.drf.api import JMSBulkModelViewSet
|
||||
from common.const.choices import ConnectMethodChoices
|
||||
from ..models import ConnectACL
|
||||
from .. import serializers
|
||||
|
||||
__all__ = ['ConnectACLViewSet', 'ConnectMethodsAPI', 'ConnectMethodPermissionsAPI']
|
||||
|
||||
|
||||
class ConnectACLViewSet(JMSBulkModelViewSet):
|
||||
queryset = ConnectACL.objects.all()
|
||||
filterset_fields = ('name', )
|
||||
search_fields = ('name',)
|
||||
serializer_class = serializers.ConnectACLSerializer
|
||||
|
||||
|
||||
class ConnectMethodsAPI(APIView):
|
||||
rbac_perms = {
|
||||
'GET': 'acls.view_connnectacl',
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get(request, *args, **kwargs):
|
||||
data = []
|
||||
for m in ConnectMethodChoices.choices:
|
||||
data.append({'label': m[1], 'value': m[0]})
|
||||
return JsonResponse(data, safe=False)
|
||||
|
||||
|
||||
class ConnectMethodPermissionsAPI(APIView):
|
||||
rbac_perms = {
|
||||
'GET': 'acls.view_connnectacl',
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get(request, *args, **kwargs):
|
||||
login_type = request.query_params.get('login_type')
|
||||
if not login_type:
|
||||
rules = ConnectACL().all_rules(request.user)
|
||||
return JsonResponse({'rules': rules})
|
||||
|
||||
acl = ConnectACL.match(request.user, login_type)
|
||||
if acl:
|
||||
err = _('The current user is not allowed to login in this way')
|
||||
return JsonResponse({'error': err})
|
||||
else:
|
||||
return JsonResponse({'msg': 'ok'})
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
# Generated by Django 3.2.16 on 2022-11-30 02:46
|
||||
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('users', '0040_alter_user_source'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('acls', '0003_auto_20211130_1037'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='ConnectACL',
|
||||
fields=[
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('priority', models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Active')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('rules', models.JSONField(default=list, verbose_name='Rule')),
|
||||
('action', models.CharField(choices=[('reject', 'Reject'), ('allow', 'Allow')], default='reject', max_length=64, verbose_name='Action')),
|
||||
('user_groups', models.ManyToManyField(blank=True, related_name='connect_acls', to='users.UserGroup', verbose_name='User group')),
|
||||
('users', models.ManyToManyField(blank=True, related_name='connect_acls', to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Connect acl',
|
||||
'ordering': ('priority', '-date_updated', 'name'),
|
||||
},
|
||||
),
|
||||
]
|
|
@ -0,0 +1,120 @@
|
|||
from django.db import models
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils.connection import get_redis_client
|
||||
from common.const.choices import ConnectMethodChoices
|
||||
from orgs.mixins.models import OrgManager, OrgModelMixin
|
||||
from .base import BaseACL, BaseACLQuerySet
|
||||
|
||||
|
||||
class ACLManager(OrgManager):
|
||||
|
||||
def valid(self):
|
||||
return self.get_queryset().valid()
|
||||
|
||||
|
||||
class ConnectACL(BaseACL, OrgModelMixin):
|
||||
ConnectACLUserCacheKey = 'CONNECT_ACL_USER_{}'
|
||||
ConnectACLUserCacheTTL = 600
|
||||
|
||||
class ActionChoices(models.TextChoices):
|
||||
reject = 'reject', _('Reject')
|
||||
|
||||
# 用户
|
||||
users = models.ManyToManyField(
|
||||
'users.User', related_name='connect_acls', blank=True,
|
||||
verbose_name=_("User")
|
||||
)
|
||||
user_groups = models.ManyToManyField(
|
||||
'users.UserGroup', related_name='connect_acls', blank=True,
|
||||
verbose_name=_("User group"),
|
||||
)
|
||||
rules = models.JSONField(default=list, verbose_name=_('Rule'))
|
||||
# 动作
|
||||
action = models.CharField(
|
||||
max_length=64, verbose_name=_('Action'),
|
||||
choices=ActionChoices.choices, default=ActionChoices.reject
|
||||
)
|
||||
|
||||
objects = ACLManager.from_queryset(BaseACLQuerySet)()
|
||||
|
||||
class Meta:
|
||||
ordering = ('priority', '-date_updated', 'name')
|
||||
verbose_name = _('Connect acl')
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def rules_display(self):
|
||||
return ', '.join(
|
||||
[ConnectMethodChoices.get_label(i) for i in self.rules]
|
||||
)
|
||||
|
||||
def is_action(self, action):
|
||||
return self.action == action
|
||||
|
||||
@staticmethod
|
||||
def match(user, connect_type):
|
||||
if not user:
|
||||
return
|
||||
|
||||
user_acls = user.connect_acls.all().valid().distinct()
|
||||
for acl in user_acls:
|
||||
if connect_type in acl.rules:
|
||||
return acl
|
||||
|
||||
for user_group in user.groups.all():
|
||||
acls = user_group.connect_acls.all().valid().distinct()
|
||||
for acl in acls:
|
||||
if connect_type in acl.rules:
|
||||
return acl
|
||||
|
||||
def _get_all_rules_from_cache(self, user):
|
||||
find = False
|
||||
cache_key = self.ConnectACLUserCacheKey.format(user.id)
|
||||
rules = cache.get(cache_key)
|
||||
if rules is not None:
|
||||
find = True
|
||||
return rules, find
|
||||
|
||||
@staticmethod
|
||||
def _get_all_rules_from_db(user):
|
||||
connect_rules = set()
|
||||
user_acls = user.connect_acls.all().valid()
|
||||
user_acl_rules = user_acls.values_list('id', 'rules')
|
||||
for r_id, rule in user_acl_rules:
|
||||
connect_rules.update(rule)
|
||||
|
||||
for ug in user.groups.all():
|
||||
user_group_acls = ug.connect_acls.all().valid()
|
||||
user_group_rules = user_group_acls.values_list('id', 'rules')
|
||||
for r_id, rule in user_group_rules:
|
||||
connect_rules.update(rule)
|
||||
return list(connect_rules)
|
||||
|
||||
def set_all_rules_to_cache(self, key, rules):
|
||||
cache.set(key, rules, self.ConnectACLUserCacheTTL)
|
||||
|
||||
def all_rules(self, user):
|
||||
rules, find = self._get_all_rules_from_cache(user)
|
||||
if not find:
|
||||
rules = self._get_all_rules_from_db(user)
|
||||
self.set_all_rules_to_cache(
|
||||
self.ConnectACLUserCacheKey.format(user.id), rules
|
||||
)
|
||||
return rules
|
||||
|
||||
def clear_rules_cache(self):
|
||||
cache.delete_pattern(
|
||||
self.ConnectACLUserCacheKey.format('*')
|
||||
)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.clear_rules_cache()
|
||||
return super().save(*args, **kwargs)
|
||||
|
||||
def delete(self, using=None, keep_parents=False):
|
||||
self.clear_rules_cache()
|
||||
return super().delete(using=using, keep_parents=keep_parents)
|
|
@ -53,12 +53,13 @@ class LoginACL(BaseACL):
|
|||
|
||||
@staticmethod
|
||||
def match(user, ip):
|
||||
acls = LoginACL.filter_acl(user)
|
||||
if not acls:
|
||||
acl_qs = LoginACL.filter_acl(user)
|
||||
if not acl_qs:
|
||||
return
|
||||
|
||||
for acl in acls:
|
||||
if acl.is_action(LoginACL.ActionChoices.confirm) and not acl.reviewers.exists():
|
||||
for acl in acl_qs:
|
||||
if acl.is_action(LoginACL.ActionChoices.confirm) and \
|
||||
not acl.reviewers.exists():
|
||||
continue
|
||||
ip_group = acl.rules.get('ip_group')
|
||||
time_periods = acl.rules.get('time_period')
|
||||
|
@ -79,12 +80,12 @@ class LoginACL(BaseACL):
|
|||
login_datetime = local_now_display()
|
||||
data = {
|
||||
'title': title,
|
||||
'type': const.TicketType.login_confirm,
|
||||
'applicant': self.user,
|
||||
'apply_login_city': login_city,
|
||||
'apply_login_ip': login_ip,
|
||||
'apply_login_datetime': login_datetime,
|
||||
'org_id': Organization.ROOT_ID,
|
||||
'apply_login_city': login_city,
|
||||
'apply_login_datetime': login_datetime,
|
||||
'type': const.TicketType.login_confirm,
|
||||
}
|
||||
ticket = ApplyLoginTicket.objects.create(**data)
|
||||
assignees = self.reviewers.all()
|
||||
|
|
|
@ -86,12 +86,12 @@ class LoginAssetACL(BaseACL, OrgModelMixin):
|
|||
title = _('Login asset confirm') + ' ({})'.format(user)
|
||||
data = {
|
||||
'title': title,
|
||||
'type': TicketType.login_asset_confirm,
|
||||
'org_id': org_id,
|
||||
'applicant': user,
|
||||
'apply_login_user': user,
|
||||
'apply_login_asset': asset,
|
||||
'apply_login_account': str(account),
|
||||
'org_id': org_id,
|
||||
'type': TicketType.login_asset_confirm,
|
||||
}
|
||||
ticket = ApplyLoginAssetTicket.objects.create(**data)
|
||||
ticket.open_by_system(assignees)
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
from django.utils.translation import ugettext as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.serializers import BulkModelSerializer
|
||||
from common.const.choices import ConnectMethodChoices
|
||||
from ..models import ConnectACL
|
||||
|
||||
|
||||
__all__ = ['ConnectACLSerializer', ]
|
||||
|
||||
|
||||
class ConnectACLSerializer(BulkModelSerializer):
|
||||
action_display = serializers.ReadOnlyField(source='get_action_display', label=_('Action'))
|
||||
|
||||
class Meta:
|
||||
model = ConnectACL
|
||||
fields_mini = ['id', 'name']
|
||||
fields_small = fields_mini + [
|
||||
'priority', 'rules', 'rules_display', 'action', 'action_display', 'is_active',
|
||||
'date_created', 'date_updated', 'comment', 'created_by'
|
||||
]
|
||||
fields_m2m = ['users', 'user_groups']
|
||||
fields = fields_small + fields_m2m
|
||||
extra_kwargs = {
|
||||
'priority': {'default': 50},
|
||||
'is_active': {'default': True}
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def validate_rules(rules):
|
||||
for r in rules:
|
||||
label = ConnectMethodChoices.get_label(r)
|
||||
if not label:
|
||||
error = _('Invalid connection method: {}').format(r)
|
||||
raise serializers.ValidationError(error)
|
||||
return rules
|
|
@ -2,38 +2,57 @@ from django.utils.translation import ugettext as _
|
|||
from rest_framework import serializers
|
||||
from common.drf.serializers import BulkModelSerializer
|
||||
from common.drf.serializers import MethodSerializer
|
||||
from common.drf.fields import ObjectRelatedField
|
||||
from jumpserver.utils import has_valid_xpack_license
|
||||
from users.models import User
|
||||
from ..models import LoginACL
|
||||
from .rules import RuleSerializer
|
||||
|
||||
__all__ = ['LoginACLSerializer', ]
|
||||
__all__ = [
|
||||
"LoginACLSerializer",
|
||||
]
|
||||
|
||||
common_help_text = _('Format for comma-delimited string, with * indicating a match all. ')
|
||||
common_help_text = _(
|
||||
"Format for comma-delimited string, with * indicating a match all. "
|
||||
)
|
||||
|
||||
|
||||
class LoginACLSerializer(BulkModelSerializer):
|
||||
user_display = serializers.ReadOnlyField(source='user.username', label=_('Username'))
|
||||
reviewers_display = serializers.SerializerMethodField(label=_('Reviewers'))
|
||||
action_display = serializers.ReadOnlyField(source='get_action_display', label=_('Action'))
|
||||
reviewers_amount = serializers.IntegerField(read_only=True, source='reviewers.count')
|
||||
user = ObjectRelatedField(queryset=User.objects, label=_("User"))
|
||||
reviewers = ObjectRelatedField(
|
||||
queryset=User.objects, label=_("Reviewers"), many=True, required=False
|
||||
)
|
||||
action_display = serializers.ReadOnlyField(
|
||||
source="get_action_display", label=_("Action")
|
||||
)
|
||||
reviewers_amount = serializers.IntegerField(
|
||||
read_only=True, source="reviewers.count"
|
||||
)
|
||||
rules = MethodSerializer()
|
||||
|
||||
class Meta:
|
||||
model = LoginACL
|
||||
fields_mini = ['id', 'name']
|
||||
fields_mini = ["id", "name"]
|
||||
fields_small = fields_mini + [
|
||||
'priority', 'rules', 'action', 'action_display',
|
||||
'is_active', 'user', 'user_display',
|
||||
'date_created', 'date_updated', 'reviewers_amount',
|
||||
'comment', 'created_by'
|
||||
"priority",
|
||||
"rules",
|
||||
"action",
|
||||
"action_display",
|
||||
"is_active",
|
||||
"user",
|
||||
"date_created",
|
||||
"date_updated",
|
||||
"reviewers_amount",
|
||||
"comment",
|
||||
"created_by",
|
||||
]
|
||||
fields_fk = ['user', 'user_display']
|
||||
fields_m2m = ['reviewers', 'reviewers_display']
|
||||
fields_fk = ["user"]
|
||||
fields_m2m = ["reviewers"]
|
||||
fields = fields_small + fields_fk + fields_m2m
|
||||
extra_kwargs = {
|
||||
'priority': {'default': 50},
|
||||
'is_active': {'default': True},
|
||||
"reviewers": {'allow_null': False, 'required': True},
|
||||
"priority": {"default": 50},
|
||||
"is_active": {"default": True},
|
||||
"reviewers": {"allow_null": False, "required": True},
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -41,7 +60,7 @@ class LoginACLSerializer(BulkModelSerializer):
|
|||
self.set_action_choices()
|
||||
|
||||
def set_action_choices(self):
|
||||
action = self.fields.get('action')
|
||||
action = self.fields.get("action")
|
||||
if not action:
|
||||
return
|
||||
choices = action._choices
|
||||
|
@ -51,6 +70,3 @@ class LoginACLSerializer(BulkModelSerializer):
|
|||
|
||||
def get_rules_serializer(self):
|
||||
return RuleSerializer()
|
||||
|
||||
def get_reviewers_display(self, obj):
|
||||
return ','.join([str(user) for user in obj.reviewers.all()])
|
||||
|
|
|
@ -3,54 +3,66 @@ from django.utils.translation import ugettext_lazy as _
|
|||
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from orgs.models import Organization
|
||||
from assets.const import Protocol
|
||||
from common.drf.fields import LabeledChoiceField
|
||||
from acls import models
|
||||
|
||||
|
||||
__all__ = ['LoginAssetACLSerializer']
|
||||
__all__ = ["LoginAssetACLSerializer"]
|
||||
|
||||
|
||||
common_help_text = _('Format for comma-delimited string, with * indicating a match all. ')
|
||||
common_help_text = _(
|
||||
"Format for comma-delimited string, with * indicating a match all. "
|
||||
)
|
||||
|
||||
|
||||
class LoginAssetACLUsersSerializer(serializers.Serializer):
|
||||
username_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Username'),
|
||||
help_text=common_help_text
|
||||
default=["*"],
|
||||
child=serializers.CharField(max_length=128),
|
||||
label=_("Username"),
|
||||
help_text=common_help_text,
|
||||
)
|
||||
|
||||
|
||||
class LoginAssetACLAssestsSerializer(serializers.Serializer):
|
||||
ip_group_help_text = _(
|
||||
'Format for comma-delimited string, with * indicating a match all. '
|
||||
'Such as: '
|
||||
'192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64 '
|
||||
'(Domain name support)'
|
||||
"Format for comma-delimited string, with * indicating a match all. "
|
||||
"Such as: "
|
||||
"192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64 "
|
||||
"(Domain name support)"
|
||||
)
|
||||
|
||||
ip_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=1024), label=_('IP'),
|
||||
help_text=ip_group_help_text
|
||||
default=["*"],
|
||||
child=serializers.CharField(max_length=1024),
|
||||
label=_("IP"),
|
||||
help_text=ip_group_help_text,
|
||||
)
|
||||
hostname_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Hostname'),
|
||||
help_text=common_help_text
|
||||
default=["*"],
|
||||
child=serializers.CharField(max_length=128),
|
||||
label=_("Hostname"),
|
||||
help_text=common_help_text,
|
||||
)
|
||||
|
||||
|
||||
class LoginAssetACLAccountsSerializer(serializers.Serializer):
|
||||
protocol_group_help_text = _(
|
||||
'Format for comma-delimited string, with * indicating a match all. '
|
||||
'Protocol options: {}'
|
||||
"Format for comma-delimited string, with * indicating a match all. "
|
||||
"Protocol options: {}"
|
||||
)
|
||||
|
||||
name_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Name'),
|
||||
help_text=common_help_text
|
||||
default=["*"],
|
||||
child=serializers.CharField(max_length=128),
|
||||
label=_("Name"),
|
||||
help_text=common_help_text,
|
||||
)
|
||||
username_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Username'),
|
||||
help_text=common_help_text
|
||||
default=["*"],
|
||||
child=serializers.CharField(max_length=128),
|
||||
label=_("Username"),
|
||||
help_text=common_help_text,
|
||||
)
|
||||
|
||||
|
||||
|
@ -58,34 +70,48 @@ class LoginAssetACLSerializer(BulkOrgResourceModelSerializer):
|
|||
users = LoginAssetACLUsersSerializer()
|
||||
assets = LoginAssetACLAssestsSerializer()
|
||||
accounts = LoginAssetACLAccountsSerializer()
|
||||
reviewers_amount = serializers.IntegerField(read_only=True, source='reviewers.count')
|
||||
action_display = serializers.ReadOnlyField(source='get_action_display', label=_('Action'))
|
||||
reviewers_amount = serializers.IntegerField(
|
||||
read_only=True, source="reviewers.count"
|
||||
)
|
||||
action = LabeledChoiceField(
|
||||
choices=models.LoginAssetACL.ActionChoices.choices, label=_("Action")
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = models.LoginAssetACL
|
||||
fields_mini = ['id', 'name']
|
||||
fields_mini = ["id", "name"]
|
||||
fields_small = fields_mini + [
|
||||
'users', 'accounts', 'assets',
|
||||
'is_active', 'date_created', 'date_updated',
|
||||
'priority', 'action', 'action_display', 'comment', 'created_by', 'org_id'
|
||||
"users",
|
||||
"accounts",
|
||||
"assets",
|
||||
"is_active",
|
||||
"date_created",
|
||||
"date_updated",
|
||||
"priority",
|
||||
"action",
|
||||
"comment",
|
||||
"created_by",
|
||||
"org_id",
|
||||
]
|
||||
fields_m2m = ['reviewers', 'reviewers_amount']
|
||||
fields_m2m = ["reviewers", "reviewers_amount"]
|
||||
fields = fields_small + fields_m2m
|
||||
extra_kwargs = {
|
||||
"reviewers": {'allow_null': False, 'required': True},
|
||||
'priority': {'default': 50},
|
||||
'is_active': {'default': True},
|
||||
"reviewers": {"allow_null": False, "required": True},
|
||||
"priority": {"default": 50},
|
||||
"is_active": {"default": True},
|
||||
}
|
||||
|
||||
def validate_reviewers(self, reviewers):
|
||||
org_id = self.fields['org_id'].default()
|
||||
org_id = self.fields["org_id"].default()
|
||||
org = Organization.get_instance(org_id)
|
||||
if not org:
|
||||
error = _('The organization `{}` does not exist'.format(org_id))
|
||||
error = _("The organization `{}` does not exist".format(org_id))
|
||||
raise serializers.ValidationError(error)
|
||||
users = org.get_members()
|
||||
valid_reviewers = list(set(reviewers) & set(users))
|
||||
if not valid_reviewers:
|
||||
error = _('None of the reviewers belong to Organization `{}`'.format(org.name))
|
||||
error = _(
|
||||
"None of the reviewers belong to Organization `{}`".format(org.name)
|
||||
)
|
||||
raise serializers.ValidationError(error)
|
||||
return valid_reviewers
|
||||
|
|
|
@ -1,89 +1,89 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
import django_filters
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from common.utils import get_logger
|
||||
from assets import serializers
|
||||
from assets.models import Asset
|
||||
from assets.filters import IpInFilterBackend, LabelFilterBackend, NodeFilterBackend
|
||||
from assets.tasks import (
|
||||
push_accounts_to_assets, test_assets_connectivity_manual,
|
||||
update_assets_hardware_info_manual, verify_accounts_connectivity,
|
||||
)
|
||||
from common.drf.filters import BaseFilterSet
|
||||
from common.mixins.api import SuggestionMixin
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from common.utils import get_logger
|
||||
from orgs.mixins import generics
|
||||
from assets import serializers
|
||||
from assets.models import Asset, Gateway
|
||||
from assets.tasks import (
|
||||
push_accounts_to_assets,
|
||||
verify_accounts_connectivity,
|
||||
test_assets_connectivity_manual,
|
||||
update_assets_hardware_info_manual,
|
||||
)
|
||||
from assets.filters import NodeFilterBackend, LabelFilterBackend, IpInFilterBackend
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from ..mixin import NodeFilterMixin
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = [
|
||||
'AssetViewSet', 'AssetTaskCreateApi', 'AssetsTaskCreateApi',
|
||||
"AssetViewSet",
|
||||
"AssetTaskCreateApi",
|
||||
"AssetsTaskCreateApi",
|
||||
]
|
||||
|
||||
|
||||
class AssetFilterSet(BaseFilterSet):
|
||||
type = django_filters.CharFilter(field_name='platform__type', lookup_expr='exact')
|
||||
category = django_filters.CharFilter(field_name='platform__category', lookup_expr='exact')
|
||||
hostname = django_filters.CharFilter(field_name='name', lookup_expr='exact')
|
||||
type = django_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
|
||||
category = django_filters.CharFilter(
|
||||
field_name="platform__category", lookup_expr="exact"
|
||||
)
|
||||
hostname = django_filters.CharFilter(field_name="name", lookup_expr="exact")
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
fields = ['name', 'address', 'is_active', 'type', 'category', 'hostname']
|
||||
fields = ["name", "address", "is_active", "type", "category", "hostname"]
|
||||
|
||||
|
||||
class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
|
||||
"""
|
||||
API endpoint that allows Asset to be viewed or edited.
|
||||
"""
|
||||
|
||||
model = Asset
|
||||
filterset_class = AssetFilterSet
|
||||
search_fields = ("name", "address")
|
||||
ordering_fields = ("name", "address")
|
||||
ordering = ('name',)
|
||||
ordering = ("name",)
|
||||
serializer_classes = (
|
||||
('default', serializers.AssetSerializer),
|
||||
('suggestion', serializers.MiniAssetSerializer),
|
||||
('platform', serializers.PlatformSerializer),
|
||||
('gateways', serializers.GatewayWithAuthSerializer)
|
||||
("default", serializers.AssetSerializer),
|
||||
("suggestion", serializers.MiniAssetSerializer),
|
||||
("platform", serializers.PlatformSerializer),
|
||||
("gateways", serializers.GatewayWithAuthSerializer),
|
||||
)
|
||||
rbac_perms = (
|
||||
('match', 'assets.match_asset'),
|
||||
('platform', 'assets.view_platform'),
|
||||
('gateways', 'assets.view_gateway')
|
||||
("match", "assets.match_asset"),
|
||||
("platform", "assets.view_platform"),
|
||||
("gateways", "assets.view_gateway"),
|
||||
)
|
||||
extra_filter_backends = [
|
||||
LabelFilterBackend,
|
||||
IpInFilterBackend,
|
||||
NodeFilterBackend
|
||||
]
|
||||
extra_filter_backends = [LabelFilterBackend, IpInFilterBackend, NodeFilterBackend]
|
||||
|
||||
@action(methods=['GET'], detail=True, url_path='platform')
|
||||
@action(methods=["GET"], detail=True, url_path="platform")
|
||||
def platform(self, *args, **kwargs):
|
||||
asset = self.get_object()
|
||||
serializer = self.get_serializer(asset.platform)
|
||||
return Response(serializer.data)
|
||||
|
||||
@action(methods=['GET'], detail=True, url_path='gateways')
|
||||
@action(methods=["GET"], detail=True, url_path="gateways")
|
||||
def gateways(self, *args, **kwargs):
|
||||
asset = self.get_object()
|
||||
if not asset.domain:
|
||||
gateways = Gateway.objects.none()
|
||||
gateways = Asset.objects.none()
|
||||
else:
|
||||
gateways = asset.domain.gateways.filter(protocol='ssh')
|
||||
gateways = asset.domain.gateways.filter(protocol="ssh")
|
||||
return self.get_paginated_response_from_queryset(gateways)
|
||||
|
||||
|
||||
class AssetsTaskMixin:
|
||||
def perform_assets_task(self, serializer):
|
||||
data = serializer.validated_data
|
||||
assets = data.get('assets', [])
|
||||
assets = data.get("assets", [])
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
if data['action'] == "refresh":
|
||||
if data["action"] == "refresh":
|
||||
task = update_assets_hardware_info_manual.delay(asset_ids)
|
||||
else:
|
||||
task = test_assets_connectivity_manual.delay(asset_ids)
|
||||
|
@ -94,9 +94,9 @@ class AssetsTaskMixin:
|
|||
self.set_task_to_serializer_data(serializer, task)
|
||||
|
||||
def set_task_to_serializer_data(self, serializer, task):
|
||||
data = getattr(serializer, '_data', {})
|
||||
data = getattr(serializer, "_data", {})
|
||||
data["task"] = task.id
|
||||
setattr(serializer, '_data', data)
|
||||
setattr(serializer, "_data", data)
|
||||
|
||||
|
||||
class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
|
||||
|
@ -104,18 +104,18 @@ class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
|
|||
serializer_class = serializers.AssetTaskSerializer
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
pk = self.kwargs.get('pk')
|
||||
request.data['asset'] = pk
|
||||
request.data['assets'] = [pk]
|
||||
pk = self.kwargs.get("pk")
|
||||
request.data["asset"] = pk
|
||||
request.data["assets"] = [pk]
|
||||
return super().create(request, *args, **kwargs)
|
||||
|
||||
def check_permissions(self, request):
|
||||
action = request.data.get('action')
|
||||
action = request.data.get("action")
|
||||
action_perm_require = {
|
||||
'refresh': 'assets.refresh_assethardwareinfo',
|
||||
'push_account': 'assets.push_assetsystemuser',
|
||||
'test': 'assets.test_assetconnectivity',
|
||||
'test_account': 'assets.test_assetconnectivity'
|
||||
"refresh": "assets.refresh_assethardwareinfo",
|
||||
"push_account": "assets.push_assetsystemuser",
|
||||
"test": "assets.test_assetconnectivity",
|
||||
"test_account": "assets.test_assetconnectivity",
|
||||
}
|
||||
perm_required = action_perm_require.get(action)
|
||||
has = self.request.user.has_perm(perm_required)
|
||||
|
@ -126,19 +126,19 @@ class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
|
|||
@staticmethod
|
||||
def perform_asset_task(serializer):
|
||||
data = serializer.validated_data
|
||||
if data['action'] not in ['push_system_user', 'test_system_user']:
|
||||
if data["action"] not in ["push_system_user", "test_system_user"]:
|
||||
return
|
||||
|
||||
asset = data['asset']
|
||||
accounts = data.get('accounts')
|
||||
asset = data["asset"]
|
||||
accounts = data.get("accounts")
|
||||
if not accounts:
|
||||
accounts = asset.accounts.all()
|
||||
|
||||
asset_ids = [asset.id]
|
||||
account_ids = accounts.values_list('id', flat=True)
|
||||
if action == 'push_account':
|
||||
account_ids = accounts.values_list("id", flat=True)
|
||||
if action == "push_account":
|
||||
task = push_accounts_to_assets.delay(account_ids, asset_ids)
|
||||
elif action == 'test_account':
|
||||
elif action == "test_account":
|
||||
task = verify_accounts_connectivity.delay(account_ids, asset_ids)
|
||||
else:
|
||||
task = None
|
||||
|
@ -156,9 +156,9 @@ class AssetsTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
|
|||
serializer_class = serializers.AssetsTaskSerializer
|
||||
|
||||
def check_permissions(self, request):
|
||||
action = request.data.get('action')
|
||||
action = request.data.get("action")
|
||||
action_perm_require = {
|
||||
'refresh': 'assets.refresh_assethardwareinfo',
|
||||
"refresh": "assets.refresh_assethardwareinfo",
|
||||
}
|
||||
perm_required = action_perm_require.get(action)
|
||||
has = self.request.user.has_perm(perm_required)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
from assets.models import Host
|
||||
from assets.serializers import HostSerializer
|
||||
from .asset import AssetViewSet
|
||||
|
|
|
@ -5,7 +5,6 @@ from rest_framework import status, mixins, viewsets
|
|||
|
||||
from orgs.mixins import generics
|
||||
from assets import serializers
|
||||
from assets.const import AutomationTypes
|
||||
from assets.tasks import execute_automation
|
||||
from assets.models import BaseAutomation, AutomationExecution
|
||||
from common.const.choices import Trigger
|
||||
|
@ -111,8 +110,7 @@ class AutomationExecutionViewSet(
|
|||
serializer.is_valid(raise_exception=True)
|
||||
automation = serializer.validated_data.get('automation')
|
||||
tp = serializer.validated_data.get('type')
|
||||
model = AutomationTypes.get_type_model(tp)
|
||||
task = execute_automation.delay(
|
||||
pid=automation.pk, trigger=Trigger.manual, model=model
|
||||
pid=automation.pk, trigger=Trigger.manual, tp=tp
|
||||
)
|
||||
return Response({'task': task.id}, status=status.HTTP_201_CREATED)
|
||||
|
|
|
@ -36,5 +36,5 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
|
|||
execution = get_object_or_none(AutomationExecution, pk=eid)
|
||||
if execution:
|
||||
queryset = queryset.filter(execution=execution)
|
||||
queryset = queryset.order_by('is_success', '-date_start')
|
||||
queryset = queryset.order_by('-date_started')
|
||||
return queryset
|
||||
|
|
|
@ -7,21 +7,20 @@ from rest_framework.serializers import ValidationError
|
|||
|
||||
from common.utils import get_logger
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from ..models import Domain, Gateway
|
||||
from ..models import Domain, Host
|
||||
from .. import serializers
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
|
||||
|
||||
|
||||
class DomainViewSet(OrgBulkModelViewSet):
|
||||
model = Domain
|
||||
filterset_fields = ("name", )
|
||||
filterset_fields = ("name",)
|
||||
search_fields = filterset_fields
|
||||
serializer_class = serializers.DomainSerializer
|
||||
ordering_fields = ('name',)
|
||||
ordering = ('name', )
|
||||
ordering = ('name',)
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.query_params.get('gateway'):
|
||||
|
@ -30,21 +29,26 @@ class DomainViewSet(OrgBulkModelViewSet):
|
|||
|
||||
|
||||
class GatewayViewSet(OrgBulkModelViewSet):
|
||||
model = Gateway
|
||||
filterset_fields = ("domain__name", "name", "username", "domain")
|
||||
search_fields = ("domain__name", "name", "username", )
|
||||
filterset_fields = ("domain__name", "name", "domain")
|
||||
search_fields = ("domain__name",)
|
||||
serializer_class = serializers.GatewaySerializer
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = Host.get_gateway_queryset()
|
||||
return queryset
|
||||
|
||||
|
||||
class GatewayTestConnectionApi(SingleObjectMixin, APIView):
|
||||
queryset = Gateway.objects.all()
|
||||
object = None
|
||||
rbac_perms = {
|
||||
'POST': 'assets.test_gateway'
|
||||
}
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = Host.get_gateway_queryset()
|
||||
return queryset
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
self.object = self.get_object(Gateway.objects.all())
|
||||
self.object = self.get_object()
|
||||
local_port = self.request.data.get('port') or self.object.port
|
||||
try:
|
||||
local_port = int(local_port)
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
from .base import *
|
||||
from .host import *
|
||||
from .types import *
|
||||
from .account import *
|
||||
from .protocol import *
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
from .base import BaseType
|
||||
|
||||
GATEWAY_NAME = 'Gateway'
|
||||
|
||||
|
||||
class HostTypes(BaseType):
|
||||
LINUX = 'linux', 'Linux'
|
||||
|
@ -67,7 +69,7 @@ class HostTypes(BaseType):
|
|||
return {
|
||||
cls.LINUX: [
|
||||
{'name': 'Linux'},
|
||||
{'name': 'Gateway'}
|
||||
{'name': GATEWAY_NAME}
|
||||
],
|
||||
cls.UNIX: [
|
||||
{'name': 'Unix'},
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.db.models import Q
|
||||
from django_filters import rest_framework as drf_filters
|
||||
from rest_framework import filters
|
||||
from rest_framework.compat import coreapi, coreschema
|
||||
from django_filters import rest_framework as drf_filters
|
||||
|
||||
from assets.utils import get_node_from_request, is_query_node_all_assets
|
||||
from common.drf.filters import BaseFilterSet
|
||||
from assets.utils import is_query_node_all_assets, get_node_from_request
|
||||
from .models import Label, Node, Account
|
||||
|
||||
from .models import Account, Label, Node
|
||||
|
||||
|
||||
class AssetByNodeFilterBackend(filters.BaseFilterBackend):
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-11 11:19
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0110_changesecretrecord_asset'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='automationexecution',
|
||||
name='status',
|
||||
field=models.CharField(default='pending', max_length=16, verbose_name='Status'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,73 @@
|
|||
# Generated by Django 3.2.13 on 2022-09-29 11:03
|
||||
|
||||
from django.db import migrations
|
||||
from assets.const.host import GATEWAY_NAME
|
||||
|
||||
|
||||
def _create_account_obj(secret, secret_type, gateway, asset, account_model):
|
||||
return account_model(
|
||||
asset=asset,
|
||||
secret=secret,
|
||||
org_id=gateway.org_id,
|
||||
secret_type=secret_type,
|
||||
username=gateway.username,
|
||||
name=f'{gateway.name}-{secret_type}-{GATEWAY_NAME.lower()}',
|
||||
)
|
||||
|
||||
|
||||
def migrate_gateway_to_asset(apps, schema_editor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
gateway_model = apps.get_model('assets', 'Gateway')
|
||||
platform_model = apps.get_model('assets', 'Platform')
|
||||
gateway_platform = platform_model.objects.using(db_alias).get(name=GATEWAY_NAME)
|
||||
|
||||
print('>>> migrate gateway to asset')
|
||||
asset_dict = {}
|
||||
host_model = apps.get_model('assets', 'Host')
|
||||
asset_model = apps.get_model('assets', 'Asset')
|
||||
protocol_model = apps.get_model('assets', 'Protocol')
|
||||
gateways = gateway_model.objects.all()
|
||||
for gateway in gateways:
|
||||
comment = gateway.comment if gateway.comment else ''
|
||||
data = {
|
||||
'comment': comment,
|
||||
'name': f'{gateway.name}-{GATEWAY_NAME.lower()}',
|
||||
'address': gateway.ip,
|
||||
'domain': gateway.domain,
|
||||
'org_id': gateway.org_id,
|
||||
'is_active': gateway.is_active,
|
||||
'platform': gateway_platform,
|
||||
}
|
||||
asset = asset_model.objects.using(db_alias).create(**data)
|
||||
asset_dict[gateway.id] = asset
|
||||
protocol_model.objects.using(db_alias).create(name='ssh', port=gateway.port, asset=asset)
|
||||
hosts = [host_model(asset_ptr=asset) for asset in asset_dict.values()]
|
||||
host_model.objects.using(db_alias).bulk_create(hosts, ignore_conflicts=True)
|
||||
|
||||
print('>>> migrate gateway to account')
|
||||
accounts = []
|
||||
account_model = apps.get_model('assets', 'Account')
|
||||
for gateway in gateways:
|
||||
password = gateway.password
|
||||
private_key = gateway.private_key
|
||||
asset = asset_dict[gateway.id]
|
||||
if password:
|
||||
accounts.append(_create_account_obj(
|
||||
password, 'password', gateway, asset, account_model
|
||||
))
|
||||
|
||||
if private_key:
|
||||
accounts.append(_create_account_obj(
|
||||
private_key, 'ssh_key', gateway, asset, account_model
|
||||
))
|
||||
account_model.objects.using(db_alias).bulk_create(accounts)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('assets', '0111_alter_automationexecution_status'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_gateway_to_asset),
|
||||
]
|
|
@ -3,7 +3,8 @@ from django.utils.translation import gettext_lazy as _
|
|||
from simple_history.models import HistoricalRecords
|
||||
|
||||
from common.utils import lazyproperty
|
||||
from .base import BaseAccount, AbsConnectivity
|
||||
|
||||
from .base import AbsConnectivity, BaseAccount
|
||||
|
||||
__all__ = ['Account', 'AccountTemplate']
|
||||
|
||||
|
@ -40,9 +41,10 @@ class AccountHistoricalRecords(HistoricalRecords):
|
|||
|
||||
|
||||
class Account(AbsConnectivity, BaseAccount):
|
||||
class InnerAccount(models.TextChoices):
|
||||
INPUT = '@INPUT', '@INPUT'
|
||||
USER = '@USER', '@USER'
|
||||
class AliasAccount(models.TextChoices):
|
||||
ALL = '@ALL', _('All')
|
||||
INPUT = '@INPUT', _('Manual input')
|
||||
USER = '@USER', _('Dynamic user')
|
||||
|
||||
asset = models.ForeignKey(
|
||||
'assets.Asset', related_name='accounts',
|
||||
|
@ -76,14 +78,14 @@ class Account(AbsConnectivity, BaseAccount):
|
|||
return '{}'.format(self.username)
|
||||
|
||||
@classmethod
|
||||
def get_input_account(cls):
|
||||
def get_manual_account(cls):
|
||||
""" @INPUT 手动登录的账号(any) """
|
||||
return cls(name=cls.InnerAccount.INPUT.value, username='')
|
||||
return cls(name=cls.AliasAccount.INPUT.label, username=cls.AliasAccount.INPUT.value, secret=None)
|
||||
|
||||
@classmethod
|
||||
def get_user_account(cls, username):
|
||||
""" @USER 动态用户的账号(self) """
|
||||
return cls(name=cls.InnerAccount.USER.value, username=username)
|
||||
return cls(name=cls.AliasAccount.USER.label, username=cls.AliasAccount.USER.value)
|
||||
|
||||
|
||||
class AccountTemplate(BaseAccount):
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
|
||||
from django.db import models
|
||||
|
|
|
@ -1,6 +1,12 @@
|
|||
from assets.const import Category
|
||||
from assets.const import GATEWAY_NAME
|
||||
from .common import Asset
|
||||
|
||||
|
||||
class Host(Asset):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def get_gateway_queryset(cls):
|
||||
queryset = cls.objects.filter(
|
||||
platform__name=GATEWAY_NAME
|
||||
)
|
||||
return queryset
|
||||
|
|
|
@ -47,7 +47,7 @@ class BaseAutomation(CommonModelMixin, PeriodTaskModelMixin, OrgModelMixin):
|
|||
def get_register_task(self):
|
||||
name = f"automation_{self.type}_strategy_period_{str(self.id)[:8]}"
|
||||
task = execute_automation.name
|
||||
args = (str(self.id), Trigger.timing, self._meta.model)
|
||||
args = (str(self.id), Trigger.timing, self.type)
|
||||
kwargs = {}
|
||||
return name, task, args, kwargs
|
||||
|
||||
|
|
|
@ -65,3 +65,9 @@ class ChangeSecretRecord(JMSBaseModel):
|
|||
|
||||
def __str__(self):
|
||||
return self.account.__str__()
|
||||
|
||||
@property
|
||||
def timedelta(self):
|
||||
if self.date_started and self.date_finished:
|
||||
return self.date_finished - self.date_started
|
||||
return None
|
||||
|
|
|
@ -6,10 +6,10 @@ import sshpubkeys
|
|||
from hashlib import md5
|
||||
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.db.models import QuerySet
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import (
|
||||
ssh_key_string_to_obj, ssh_key_gen, get_logger,
|
||||
|
|
|
@ -1,22 +1,25 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import socket
|
||||
import uuid
|
||||
import socket
|
||||
import random
|
||||
|
||||
from django.core.cache import cache
|
||||
import paramiko
|
||||
|
||||
from django.db import models
|
||||
from django.core.cache import cache
|
||||
from django.db.models.query import QuerySet
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger, lazyproperty
|
||||
from common.db import fields
|
||||
from common.utils import get_logger, lazyproperty
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
from assets.models import Host
|
||||
from .base import BaseAccount
|
||||
from ..const import SecretType
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
__all__ = ['Domain', 'Gateway']
|
||||
__all__ = ['Domain', 'GatewayMixin']
|
||||
|
||||
|
||||
class Domain(OrgModelMixin):
|
||||
|
@ -33,12 +36,9 @@ class Domain(OrgModelMixin):
|
|||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
def has_gateway(self):
|
||||
return self.gateway_set.filter(is_active=True).exists()
|
||||
|
||||
@lazyproperty
|
||||
def gateways(self):
|
||||
return self.gateway_set.filter(is_active=True)
|
||||
return Host.get_gateway_queryset().filter(domain=self, is_active=True)
|
||||
|
||||
def select_gateway(self):
|
||||
return self.random_gateway()
|
||||
|
@ -53,18 +53,141 @@ class Domain(OrgModelMixin):
|
|||
return random.choice(self.gateways)
|
||||
|
||||
|
||||
class Gateway(BaseAccount):
|
||||
UNCONNECTIVE_KEY_TMPL = 'asset_unconnective_gateway_{}'
|
||||
UNCONNECTIVE_SILENCE_PERIOD_KEY_TMPL = 'asset_unconnective_gateway_silence_period_{}'
|
||||
UNCONNECTIVE_SILENCE_PERIOD_BEGIN_VALUE = 60 * 5
|
||||
class GatewayMixin:
|
||||
id: uuid.UUID
|
||||
port: int
|
||||
address: str
|
||||
accounts: QuerySet
|
||||
private_key_path: str
|
||||
private_key_obj: paramiko.RSAKey
|
||||
UNCONNECTED_KEY_TMPL = 'asset_unconnective_gateway_{}'
|
||||
UNCONNECTED_SILENCE_PERIOD_KEY_TMPL = 'asset_unconnective_gateway_silence_period_{}'
|
||||
UNCONNECTED_SILENCE_PERIOD_BEGIN_VALUE = 60 * 5
|
||||
|
||||
def set_unconnected(self):
|
||||
unconnected_key = self.UNCONNECTED_KEY_TMPL.format(self.id)
|
||||
unconnected_silence_period_key = self.UNCONNECTED_SILENCE_PERIOD_KEY_TMPL.format(self.id)
|
||||
unconnected_silence_period = cache.get(
|
||||
unconnected_silence_period_key, self.UNCONNECTED_SILENCE_PERIOD_BEGIN_VALUE
|
||||
)
|
||||
cache.set(unconnected_silence_period_key, unconnected_silence_period * 2)
|
||||
cache.set(unconnected_key, unconnected_silence_period, unconnected_silence_period)
|
||||
|
||||
def set_connective(self):
|
||||
unconnected_key = self.UNCONNECTED_KEY_TMPL.format(self.id)
|
||||
unconnected_silence_period_key = self.UNCONNECTED_SILENCE_PERIOD_KEY_TMPL.format(self.id)
|
||||
|
||||
cache.delete(unconnected_key)
|
||||
cache.delete(unconnected_silence_period_key)
|
||||
|
||||
def get_is_unconnected(self):
|
||||
unconnected_key = self.UNCONNECTED_KEY_TMPL.format(self.id)
|
||||
return cache.get(unconnected_key, False)
|
||||
|
||||
@property
|
||||
def is_connective(self):
|
||||
return not self.get_is_unconnected()
|
||||
|
||||
@is_connective.setter
|
||||
def is_connective(self, value):
|
||||
if value:
|
||||
self.set_connective()
|
||||
else:
|
||||
self.set_unconnected()
|
||||
|
||||
def test_connective(self, local_port=None):
|
||||
# TODO 走ansible runner
|
||||
if local_port is None:
|
||||
local_port = self.port
|
||||
|
||||
client = paramiko.SSHClient()
|
||||
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
proxy = paramiko.SSHClient()
|
||||
proxy.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
|
||||
try:
|
||||
proxy.connect(self.address, port=self.port,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
pkey=self.private_key_obj)
|
||||
except(paramiko.AuthenticationException,
|
||||
paramiko.BadAuthenticationType,
|
||||
paramiko.SSHException,
|
||||
paramiko.ChannelException,
|
||||
paramiko.ssh_exception.NoValidConnectionsError,
|
||||
socket.gaierror) as e:
|
||||
err = str(e)
|
||||
if err.startswith('[Errno None] Unable to connect to port'):
|
||||
err = _('Unable to connect to port {port} on {address}')
|
||||
err = err.format(port=self.port, ip=self.address)
|
||||
elif err == 'Authentication failed.':
|
||||
err = _('Authentication failed')
|
||||
elif err == 'Connect failed':
|
||||
err = _('Connect failed')
|
||||
self.is_connective = False
|
||||
return False, err
|
||||
|
||||
try:
|
||||
sock = proxy.get_transport().open_channel(
|
||||
'direct-tcpip', ('127.0.0.1', local_port), ('127.0.0.1', 0)
|
||||
)
|
||||
client.connect("127.0.0.1", port=local_port,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
key_filename=self.private_key_path,
|
||||
sock=sock,
|
||||
timeout=5)
|
||||
except (paramiko.SSHException,
|
||||
paramiko.ssh_exception.SSHException,
|
||||
paramiko.ChannelException,
|
||||
paramiko.AuthenticationException,
|
||||
TimeoutError) as e:
|
||||
|
||||
err = getattr(e, 'text', str(e))
|
||||
if err == 'Connect failed':
|
||||
err = _('Connect failed')
|
||||
self.is_connective = False
|
||||
return False, err
|
||||
finally:
|
||||
client.close()
|
||||
self.is_connective = True
|
||||
return True, None
|
||||
|
||||
@lazyproperty
|
||||
def username(self):
|
||||
account = self.accounts.all().first()
|
||||
if account:
|
||||
return account.username
|
||||
logger.error(f'Gateway {self} has no account')
|
||||
return ''
|
||||
|
||||
def get_secret(self, secret_type):
|
||||
account = self.accounts.filter(secret_type=secret_type).first()
|
||||
if account:
|
||||
return account.secret
|
||||
logger.error(f'Gateway {self} has no {secret_type} account')
|
||||
|
||||
@lazyproperty
|
||||
def password(self):
|
||||
secret_type = SecretType.PASSWORD
|
||||
return self.get_secret(secret_type)
|
||||
|
||||
@lazyproperty
|
||||
def private_key(self):
|
||||
secret_type = SecretType.SSH_KEY
|
||||
return self.get_secret(secret_type)
|
||||
|
||||
|
||||
class Gateway(BaseAccount):
|
||||
class Protocol(models.TextChoices):
|
||||
ssh = 'ssh', 'SSH'
|
||||
|
||||
name = models.CharField(max_length=128, verbose_name='Name')
|
||||
ip = models.CharField(max_length=128, verbose_name=_('IP'), db_index=True)
|
||||
port = models.IntegerField(default=22, verbose_name=_('Port'))
|
||||
protocol = models.CharField(choices=Protocol.choices, max_length=16, default=Protocol.ssh, verbose_name=_("Protocol"))
|
||||
protocol = models.CharField(
|
||||
choices=Protocol.choices, max_length=16, default=Protocol.ssh, verbose_name=_("Protocol")
|
||||
)
|
||||
domain = models.ForeignKey(Domain, on_delete=models.CASCADE, verbose_name=_("Domain"))
|
||||
comment = models.CharField(max_length=128, blank=True, null=True, verbose_name=_("Comment"))
|
||||
is_active = models.BooleanField(default=True, verbose_name=_("Is active"))
|
||||
|
@ -85,91 +208,3 @@ class Gateway(BaseAccount):
|
|||
permissions = [
|
||||
('test_gateway', _('Test gateway'))
|
||||
]
|
||||
|
||||
def set_unconnective(self):
|
||||
unconnective_key = self.UNCONNECTIVE_KEY_TMPL.format(self.id)
|
||||
unconnective_silence_period_key = self.UNCONNECTIVE_SILENCE_PERIOD_KEY_TMPL.format(self.id)
|
||||
|
||||
unconnective_silence_period = cache.get(unconnective_silence_period_key,
|
||||
self.UNCONNECTIVE_SILENCE_PERIOD_BEGIN_VALUE)
|
||||
cache.set(unconnective_silence_period_key, unconnective_silence_period * 2)
|
||||
cache.set(unconnective_key, unconnective_silence_period, unconnective_silence_period)
|
||||
|
||||
def set_connective(self):
|
||||
unconnective_key = self.UNCONNECTIVE_KEY_TMPL.format(self.id)
|
||||
unconnective_silence_period_key = self.UNCONNECTIVE_SILENCE_PERIOD_KEY_TMPL.format(self.id)
|
||||
|
||||
cache.delete(unconnective_key)
|
||||
cache.delete(unconnective_silence_period_key)
|
||||
|
||||
def get_is_unconnective(self):
|
||||
unconnective_key = self.UNCONNECTIVE_KEY_TMPL.format(self.id)
|
||||
return cache.get(unconnective_key, False)
|
||||
|
||||
@property
|
||||
def is_connective(self):
|
||||
return not self.get_is_unconnective()
|
||||
|
||||
@is_connective.setter
|
||||
def is_connective(self, value):
|
||||
if value:
|
||||
self.set_connective()
|
||||
else:
|
||||
self.set_unconnective()
|
||||
|
||||
def test_connective(self, local_port=None):
|
||||
if local_port is None:
|
||||
local_port = self.port
|
||||
|
||||
client = paramiko.SSHClient()
|
||||
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
proxy = paramiko.SSHClient()
|
||||
proxy.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
|
||||
try:
|
||||
proxy.connect(self.ip, port=self.port,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
pkey=self.private_key_obj)
|
||||
except(paramiko.AuthenticationException,
|
||||
paramiko.BadAuthenticationType,
|
||||
paramiko.SSHException,
|
||||
paramiko.ChannelException,
|
||||
paramiko.ssh_exception.NoValidConnectionsError,
|
||||
socket.gaierror) as e:
|
||||
err = str(e)
|
||||
if err.startswith('[Errno None] Unable to connect to port'):
|
||||
err = _('Unable to connect to port {port} on {address}')
|
||||
err = err.format(port=self.port, ip=self.ip)
|
||||
elif err == 'Authentication failed.':
|
||||
err = _('Authentication failed')
|
||||
elif err == 'Connect failed':
|
||||
err = _('Connect failed')
|
||||
self.is_connective = False
|
||||
return False, err
|
||||
|
||||
try:
|
||||
sock = proxy.get_transport().open_channel(
|
||||
'direct-tcpip', ('127.0.0.1', local_port), ('127.0.0.1', 0)
|
||||
)
|
||||
client.connect("127.0.0.1", port=local_port,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
key_filename=self.private_key_file,
|
||||
sock=sock,
|
||||
timeout=5)
|
||||
except (paramiko.SSHException,
|
||||
paramiko.ssh_exception.SSHException,
|
||||
paramiko.ChannelException,
|
||||
paramiko.AuthenticationException,
|
||||
TimeoutError) as e:
|
||||
|
||||
err = getattr(e, 'text', str(e))
|
||||
if err == 'Connect failed':
|
||||
err = _('Connect failed')
|
||||
self.is_connective = False
|
||||
return False, err
|
||||
finally:
|
||||
client.close()
|
||||
self.is_connective = True
|
||||
return True, None
|
||||
|
|
|
@ -15,7 +15,7 @@ from ...const import Category, AllTypes
|
|||
|
||||
__all__ = [
|
||||
'AssetSerializer', 'AssetSimpleSerializer', 'MiniAssetSerializer',
|
||||
'AssetTaskSerializer', 'AssetsTaskSerializer',
|
||||
'AssetTaskSerializer', 'AssetsTaskSerializer', 'AssetProtocolsSerializer',
|
||||
]
|
||||
|
||||
|
||||
|
|
|
@ -42,6 +42,19 @@ class ChangeSecretAutomationSerializer(AuthValidateMixin, BaseAutomationSerializ
|
|||
)},
|
||||
}}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.set_secret_type_choices()
|
||||
|
||||
def set_secret_type_choices(self):
|
||||
secret_type = self.fields.get('secret_type')
|
||||
if not secret_type:
|
||||
return
|
||||
choices = secret_type._choices
|
||||
choices.pop(SecretType.ACCESS_KEY, None)
|
||||
choices.pop(SecretType.TOKEN, None)
|
||||
secret_type._choices = choices
|
||||
|
||||
def validate_password_rules(self, password_rules):
|
||||
secret_type = self.initial_secret_type
|
||||
if secret_type != SecretType.PASSWORD:
|
||||
|
@ -93,8 +106,8 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
|
|||
class Meta:
|
||||
model = ChangeSecretRecord
|
||||
fields = [
|
||||
'id', 'asset', 'account', 'date_started',
|
||||
'date_finished', 'is_success', 'error', 'execution',
|
||||
'id', 'asset', 'account', 'date_started', 'date_finished',
|
||||
'timedelta', 'is_success', 'error', 'execution',
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
|
|
@ -1,30 +1,33 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from rest_framework import serializers
|
||||
from rest_framework.generics import get_object_or_404
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.validators import alphanumeric
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from common.drf.serializers import SecretReadableMixin
|
||||
from ..models import Domain, Gateway
|
||||
from .base import AuthValidateMixin
|
||||
from common.drf.fields import ObjectRelatedField, EncryptedField
|
||||
from assets.const import SecretType
|
||||
from ..models import Domain, Asset, Account
|
||||
from ..serializers import HostSerializer
|
||||
from .utils import validate_password_for_ansible, validate_ssh_key
|
||||
|
||||
|
||||
class DomainSerializer(BulkOrgResourceModelSerializer):
|
||||
asset_count = serializers.SerializerMethodField(label=_('Assets amount'))
|
||||
gateway_count = serializers.SerializerMethodField(label=_('Gateways count'))
|
||||
assets = ObjectRelatedField(
|
||||
many=True, required=False, queryset=Asset.objects, label=_('Asset')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Domain
|
||||
fields_mini = ['id', 'name']
|
||||
fields_small = fields_mini + [
|
||||
'comment', 'date_created'
|
||||
]
|
||||
fields_m2m = [
|
||||
'asset_count', 'assets', 'gateway_count',
|
||||
]
|
||||
fields = fields_small + fields_m2m
|
||||
read_only_fields = ('asset_count', 'gateway_count', 'date_created')
|
||||
fields_small = fields_mini + ['comment']
|
||||
fields_m2m = ['assets']
|
||||
read_only_fields = ['asset_count', 'gateway_count', 'date_created']
|
||||
fields = fields_small + fields_m2m + read_only_fields
|
||||
|
||||
extra_kwargs = {
|
||||
'assets': {'required': False, 'label': _('Assets')},
|
||||
}
|
||||
|
@ -35,32 +38,89 @@ class DomainSerializer(BulkOrgResourceModelSerializer):
|
|||
|
||||
@staticmethod
|
||||
def get_gateway_count(obj):
|
||||
return obj.gateway_set.all().count()
|
||||
return obj.gateways.count()
|
||||
|
||||
|
||||
class GatewaySerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
|
||||
is_connective = serializers.BooleanField(required=False, label=_('Connectivity'))
|
||||
class GatewaySerializer(HostSerializer):
|
||||
password = EncryptedField(
|
||||
label=_('Password'), required=False, allow_blank=True, allow_null=True, max_length=1024,
|
||||
validators=[validate_password_for_ansible], write_only=True
|
||||
)
|
||||
private_key = EncryptedField(
|
||||
label=_('SSH private key'), required=False, allow_blank=True, allow_null=True,
|
||||
max_length=16384, write_only=True
|
||||
)
|
||||
passphrase = serializers.CharField(
|
||||
label=_('Key password'), allow_blank=True, allow_null=True, required=False, write_only=True,
|
||||
max_length=512,
|
||||
)
|
||||
username = serializers.CharField(
|
||||
label=_('Username'), allow_blank=True, max_length=128, required=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Gateway
|
||||
fields_mini = ['id', 'username']
|
||||
fields_write_only = [
|
||||
'password', 'private_key', 'public_key', 'passphrase'
|
||||
class Meta(HostSerializer.Meta):
|
||||
fields = HostSerializer.Meta.fields + [
|
||||
'username', 'password', 'private_key', 'passphrase'
|
||||
]
|
||||
fields_small = fields_mini + fields_write_only + [
|
||||
'ip', 'port', 'protocol',
|
||||
'is_active', 'is_connective',
|
||||
'date_created', 'date_updated',
|
||||
'created_by', 'comment',
|
||||
]
|
||||
fields_fk = ['domain']
|
||||
fields = fields_small + fields_fk
|
||||
extra_kwargs = {
|
||||
'username': {"validators": [alphanumeric]},
|
||||
'password': {'write_only': True},
|
||||
'private_key': {"write_only": True},
|
||||
'public_key': {"write_only": True},
|
||||
|
||||
def validate_private_key(self, secret):
|
||||
if not secret:
|
||||
return
|
||||
passphrase = self.initial_data.get('passphrase')
|
||||
passphrase = passphrase if passphrase else None
|
||||
validate_ssh_key(secret, passphrase)
|
||||
return secret
|
||||
|
||||
@staticmethod
|
||||
def clean_auth_fields(validated_data):
|
||||
username = validated_data.pop('username', None)
|
||||
password = validated_data.pop('password', None)
|
||||
private_key = validated_data.pop('private_key', None)
|
||||
validated_data.pop('passphrase', None)
|
||||
return username, password, private_key
|
||||
|
||||
@staticmethod
|
||||
def create_accounts(instance, username, password, private_key):
|
||||
account_name = f'{instance.name}-{_("Gateway")}'
|
||||
account_data = {
|
||||
'privileged': True,
|
||||
'name': account_name,
|
||||
'username': username,
|
||||
'asset_id': instance.id,
|
||||
'created_by': instance.created_by
|
||||
}
|
||||
if password:
|
||||
Account.objects.create(
|
||||
**account_data, secret=password, secret_type=SecretType.PASSWORD
|
||||
)
|
||||
if private_key:
|
||||
Account.objects.create(
|
||||
**account_data, secret=private_key, secret_type=SecretType.SSH_KEY
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def update_accounts(instance, username, password, private_key):
|
||||
accounts = instance.accounts.filter(username=username)
|
||||
if password:
|
||||
account = get_object_or_404(accounts, SecretType.PASSWORD)
|
||||
account.secret = password
|
||||
account.save()
|
||||
if private_key:
|
||||
account = get_object_or_404(accounts, SecretType.SSH_KEY)
|
||||
account.secret = private_key
|
||||
account.save()
|
||||
|
||||
def create(self, validated_data):
|
||||
auth_fields = self.clean_auth_fields(validated_data)
|
||||
instance = super().create(validated_data)
|
||||
self.create_accounts(instance, *auth_fields)
|
||||
return instance
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
auth_fields = self.clean_auth_fields(validated_data)
|
||||
instance = super().update(instance, validated_data)
|
||||
self.update_accounts(instance, *auth_fields)
|
||||
return instance
|
||||
|
||||
|
||||
class GatewayWithAuthSerializer(SecretReadableMixin, GatewaySerializer):
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
from rest_framework import serializers
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
class CategoryDisplayMixin(serializers.Serializer):
|
||||
category_display = serializers.ReadOnlyField(
|
||||
source='get_category_display', label=_("Category display")
|
||||
)
|
||||
type_display = serializers.ReadOnlyField(
|
||||
source='get_type_display', label=_("Type display")
|
||||
)
|
|
@ -1,61 +1,75 @@
|
|||
from rest_framework import serializers
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import LabeledChoiceField
|
||||
from common.drf.serializers import WritableNestedModelSerializer
|
||||
from ..models import Platform, PlatformProtocol, PlatformAutomation
|
||||
from ..const import Category, AllTypes
|
||||
from ..models import Platform, PlatformProtocol, PlatformAutomation
|
||||
|
||||
__all__ = ['PlatformSerializer', 'PlatformOpsMethodSerializer']
|
||||
__all__ = ["PlatformSerializer", "PlatformOpsMethodSerializer"]
|
||||
|
||||
|
||||
class ProtocolSettingSerializer(serializers.Serializer):
|
||||
SECURITY_CHOICES = [
|
||||
('any', 'Any'),
|
||||
('rdp', 'RDP'),
|
||||
('tls', 'TLS'),
|
||||
('nla', 'NLA'),
|
||||
("any", "Any"),
|
||||
("rdp", "RDP"),
|
||||
("tls", "TLS"),
|
||||
("nla", "NLA"),
|
||||
]
|
||||
# RDP
|
||||
console = serializers.BooleanField(required=False)
|
||||
security = serializers.ChoiceField(choices=SECURITY_CHOICES, default='any')
|
||||
security = serializers.ChoiceField(choices=SECURITY_CHOICES, default="any")
|
||||
|
||||
# SFTP
|
||||
sftp_enabled = serializers.BooleanField(default=True, label=_("SFTP enabled"))
|
||||
sftp_home = serializers.CharField(default='/tmp', label=_("SFTP home"))
|
||||
sftp_home = serializers.CharField(default="/tmp", label=_("SFTP home"))
|
||||
|
||||
# HTTP
|
||||
auto_fill = serializers.BooleanField(default=False, label=_("Auto fill"))
|
||||
username_selector = serializers.CharField(default='', allow_blank=True, label=_("Username selector"))
|
||||
password_selector = serializers.CharField(default='', allow_blank=True, label=_("Password selector"))
|
||||
submit_selector = serializers.CharField(default='', allow_blank=True, label=_("Submit selector"))
|
||||
username_selector = serializers.CharField(
|
||||
default="", allow_blank=True, label=_("Username selector")
|
||||
)
|
||||
password_selector = serializers.CharField(
|
||||
default="", allow_blank=True, label=_("Password selector")
|
||||
)
|
||||
submit_selector = serializers.CharField(
|
||||
default="", allow_blank=True, label=_("Submit selector")
|
||||
)
|
||||
|
||||
|
||||
class PlatformAutomationSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = PlatformAutomation
|
||||
fields = [
|
||||
'id', 'ansible_enabled', 'ansible_config',
|
||||
'ping_enabled', 'ping_method',
|
||||
'gather_facts_enabled', 'gather_facts_method',
|
||||
'push_account_enabled', 'push_account_method',
|
||||
'change_secret_enabled', 'change_secret_method',
|
||||
'verify_account_enabled', 'verify_account_method',
|
||||
'gather_accounts_enabled', 'gather_accounts_method',
|
||||
"id",
|
||||
"ansible_enabled",
|
||||
"ansible_config",
|
||||
"ping_enabled",
|
||||
"ping_method",
|
||||
"gather_facts_enabled",
|
||||
"gather_facts_method",
|
||||
"push_account_enabled",
|
||||
"push_account_method",
|
||||
"change_secret_enabled",
|
||||
"change_secret_method",
|
||||
"verify_account_enabled",
|
||||
"verify_account_method",
|
||||
"gather_accounts_enabled",
|
||||
"gather_accounts_method",
|
||||
]
|
||||
extra_kwargs = {
|
||||
'ping_enabled': {'label': '启用资产探测'},
|
||||
'ping_method': {'label': '探测方式'},
|
||||
'gather_facts_enabled': {'label': '启用收集信息'},
|
||||
'gather_facts_method': {'label': '收集信息方式'},
|
||||
'verify_account_enabled': {'label': '启用校验账号'},
|
||||
'verify_account_method': {'label': '校验账号方式'},
|
||||
'push_account_enabled': {'label': '启用推送账号'},
|
||||
'push_account_method': {'label': '推送账号方式'},
|
||||
'change_secret_enabled': {'label': '启用账号改密'},
|
||||
'change_secret_method': {'label': '账号创建改密方式'},
|
||||
'gather_accounts_enabled': {'label': '启用账号收集'},
|
||||
'gather_accounts_method': {'label': '收集账号方式'},
|
||||
"ping_enabled": {"label": "启用资产探测"},
|
||||
"ping_method": {"label": "探测方式"},
|
||||
"gather_facts_enabled": {"label": "启用收集信息"},
|
||||
"gather_facts_method": {"label": "收集信息方式"},
|
||||
"verify_account_enabled": {"label": "启用校验账号"},
|
||||
"verify_account_method": {"label": "校验账号方式"},
|
||||
"push_account_enabled": {"label": "启用推送账号"},
|
||||
"push_account_method": {"label": "推送账号方式"},
|
||||
"change_secret_enabled": {"label": "启用账号改密"},
|
||||
"change_secret_method": {"label": "账号创建改密方式"},
|
||||
"gather_accounts_enabled": {"label": "启用账号收集"},
|
||||
"gather_accounts_method": {"label": "收集账号方式"},
|
||||
}
|
||||
|
||||
|
||||
|
@ -66,42 +80,62 @@ class PlatformProtocolsSerializer(serializers.ModelSerializer):
|
|||
class Meta:
|
||||
model = PlatformProtocol
|
||||
fields = [
|
||||
'id', 'name', 'port', 'primary', 'default',
|
||||
'required', 'secret_types', 'setting',
|
||||
"id",
|
||||
"name",
|
||||
"port",
|
||||
"primary",
|
||||
"default",
|
||||
"required",
|
||||
"secret_types",
|
||||
"setting",
|
||||
]
|
||||
|
||||
|
||||
class PlatformSerializer(WritableNestedModelSerializer):
|
||||
charset = LabeledChoiceField(
|
||||
choices=Platform.CharsetChoices.choices, label=_("Charset")
|
||||
)
|
||||
type = LabeledChoiceField(choices=AllTypes.choices(), label=_("Type"))
|
||||
category = LabeledChoiceField(choices=Category.choices, label=_("Category"))
|
||||
protocols = PlatformProtocolsSerializer(label=_('Protocols'), many=True, required=False)
|
||||
automation = PlatformAutomationSerializer(label=_('Automation'), required=False)
|
||||
protocols = PlatformProtocolsSerializer(
|
||||
label=_("Protocols"), many=True, required=False
|
||||
)
|
||||
automation = PlatformAutomationSerializer(label=_("Automation"), required=False)
|
||||
su_method = LabeledChoiceField(
|
||||
choices=[('sudo', 'sudo su -'), ('su', 'su - ')],
|
||||
label='切换方式', required=False, default='sudo'
|
||||
choices=[("sudo", "sudo su -"), ("su", "su - ")],
|
||||
label="切换方式",
|
||||
required=False,
|
||||
default="sudo",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Platform
|
||||
fields_mini = ['id', 'name', 'internal']
|
||||
fields_mini = ["id", "name", "internal"]
|
||||
fields_small = fields_mini + [
|
||||
'category', 'type', 'charset',
|
||||
"category",
|
||||
"type",
|
||||
"charset",
|
||||
]
|
||||
fields = fields_small + [
|
||||
'protocols_enabled', 'protocols', 'domain_enabled',
|
||||
'su_enabled', 'su_method', 'automation', 'comment',
|
||||
"protocols_enabled",
|
||||
"protocols",
|
||||
"domain_enabled",
|
||||
"su_enabled",
|
||||
"su_method",
|
||||
"automation",
|
||||
"comment",
|
||||
]
|
||||
extra_kwargs = {
|
||||
'su_enabled': {'label': '启用切换账号'},
|
||||
'protocols_enabled': {'label': '启用协议'},
|
||||
'domain_enabled': {'label': "启用网域"},
|
||||
'domain_default': {'label': "默认网域"},
|
||||
"su_enabled": {"label": "启用切换账号"},
|
||||
"protocols_enabled": {"label": "启用协议"},
|
||||
"domain_enabled": {"label": "启用网域"},
|
||||
"domain_default": {"label": "默认网域"},
|
||||
}
|
||||
|
||||
|
||||
class PlatformOpsMethodSerializer(serializers.Serializer):
|
||||
id = serializers.CharField(read_only=True)
|
||||
name = serializers.CharField(max_length=50, label=_('Name'))
|
||||
category = serializers.CharField(max_length=50, label=_('Category'))
|
||||
name = serializers.CharField(max_length=50, label=_("Name"))
|
||||
category = serializers.CharField(max_length=50, label=_("Category"))
|
||||
type = serializers.ListSerializer(child=serializers.CharField())
|
||||
method = serializers.CharField()
|
||||
|
|
|
@ -1,13 +1,16 @@
|
|||
from celery import shared_task
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.utils import tmp_to_root_org, tmp_to_org
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from assets.const import AutomationTypes
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task(queue='ansible')
|
||||
def execute_automation(pid, trigger, model):
|
||||
@shared_task(queue='ansible', verbose_name=_('Execute automation'))
|
||||
def execute_automation(pid, trigger, tp):
|
||||
model = AutomationTypes.get_type_model(tp)
|
||||
with tmp_to_root_org():
|
||||
instance = get_object_or_none(model, pk=pid)
|
||||
if not instance:
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.utils import get_object_or_none, get_logger
|
||||
from orgs.utils import tmp_to_org, tmp_to_root_org
|
||||
|
@ -9,7 +10,7 @@ from assets.models import AccountBackupPlan
|
|||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Execute account backup plan'))
|
||||
def execute_account_backup_plan(pid, trigger):
|
||||
with tmp_to_root_org():
|
||||
plan = get_object_or_none(AccountBackupPlan, pk=pid)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.utils import tmp_to_root_org, org_aware_func
|
||||
from common.utils import get_logger
|
||||
|
@ -24,7 +25,7 @@ def gather_asset_accounts_util(nodes, task_name):
|
|||
instance.execute()
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Gather asset accounts'))
|
||||
def gather_asset_accounts(node_ids, task_name=None):
|
||||
if task_name is None:
|
||||
task_name = gettext_noop("Gather assets accounts")
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func, tmp_to_root_org
|
||||
|
@ -40,7 +41,7 @@ def update_assets_hardware_info_util(assets=None, nodes=None, task_name=None):
|
|||
instance.execute()
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Manually update the hardware information of assets'))
|
||||
def update_assets_hardware_info_manual(asset_ids):
|
||||
from assets.models import Asset
|
||||
with tmp_to_root_org():
|
||||
|
@ -49,7 +50,7 @@ def update_assets_hardware_info_manual(asset_ids):
|
|||
update_assets_hardware_info_util(assets=assets, task_name=task_name)
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Manually update the hardware information of assets under a node'))
|
||||
def update_node_assets_hardware_info_manual(node_id):
|
||||
from assets.models import Node
|
||||
with tmp_to_root_org():
|
||||
|
|
|
@ -10,11 +10,10 @@ from common.utils.lock import AcquireFailed
|
|||
from common.utils import get_logger
|
||||
from common.const.crontab import CRONTAB_AT_AM_TWO
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Check the amount of assets under the node'))
|
||||
def check_node_assets_amount_task(org_id=None):
|
||||
if org_id is None:
|
||||
orgs = Organization.objects.all()
|
||||
|
@ -32,6 +31,6 @@ def check_node_assets_amount_task(org_id=None):
|
|||
|
||||
|
||||
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Periodic check the amount of assets under the node'))
|
||||
def check_node_assets_amount_period_task():
|
||||
check_node_assets_amount_task()
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func, tmp_to_root_org
|
||||
|
@ -29,7 +30,7 @@ def test_asset_connectivity_util(assets, task_name=None):
|
|||
instance.execute()
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Manually test the connectivity of a asset'))
|
||||
def test_assets_connectivity_manual(asset_ids):
|
||||
from assets.models import Asset
|
||||
with tmp_to_root_org():
|
||||
|
@ -39,7 +40,7 @@ def test_assets_connectivity_manual(asset_ids):
|
|||
test_asset_connectivity_util(assets, task_name=task_name)
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Manually test the connectivity of assets under a node'))
|
||||
def test_node_assets_connectivity_manual(node_id):
|
||||
from assets.models import Node
|
||||
with tmp_to_root_org():
|
||||
|
|
|
@ -3,6 +3,7 @@ from django.utils.translation import gettext_noop
|
|||
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func, tmp_to_root_org
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = [
|
||||
|
@ -27,7 +28,7 @@ def push_accounts_to_assets_util(accounts, assets):
|
|||
instance.execute()
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Push accounts to assets'))
|
||||
def push_accounts_to_assets(account_ids, asset_ids):
|
||||
from assets.models import Asset, Account
|
||||
with tmp_to_root_org():
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import org_aware_func, tmp_to_root_org
|
||||
|
@ -26,7 +27,7 @@ def verify_accounts_connectivity_util(accounts, assets, task_name):
|
|||
instance.execute()
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@shared_task(queue="ansible", verbose_name=_('Verify asset account availability'))
|
||||
def verify_accounts_connectivity(account_ids, asset_ids):
|
||||
from assets.models import Asset, Account
|
||||
with tmp_to_root_org():
|
||||
|
|
|
@ -1,24 +1,74 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.db.models import TextChoices, IntegerChoices
|
||||
|
||||
DEFAULT_CITY = _("Unknown")
|
||||
|
||||
MODELS_NEED_RECORD = (
|
||||
# users
|
||||
'User', 'UserGroup',
|
||||
"User",
|
||||
"UserGroup",
|
||||
# acls
|
||||
'LoginACL', 'LoginAssetACL', 'LoginConfirmSetting',
|
||||
"LoginACL",
|
||||
"LoginAssetACL",
|
||||
"LoginConfirmSetting",
|
||||
# assets
|
||||
'Asset', 'Node', 'AdminUser', 'SystemUser', 'Domain', 'Gateway', 'CommandFilterRule',
|
||||
'CommandFilter', 'Platform', 'Account',
|
||||
"Asset",
|
||||
"Node",
|
||||
"AdminUser",
|
||||
"SystemUser",
|
||||
"Domain",
|
||||
"Gateway",
|
||||
"CommandFilterRule",
|
||||
"CommandFilter",
|
||||
"Platform",
|
||||
"Account",
|
||||
# applications
|
||||
# orgs
|
||||
'Organization',
|
||||
"Organization",
|
||||
# settings
|
||||
'Setting',
|
||||
"Setting",
|
||||
# perms
|
||||
'AssetPermission',
|
||||
"AssetPermission",
|
||||
# xpack
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'GatherUserTask',
|
||||
"License",
|
||||
"Account",
|
||||
"SyncInstanceTask",
|
||||
"ChangeAuthPlan",
|
||||
"GatherUserTask",
|
||||
)
|
||||
|
||||
|
||||
class OperateChoices(TextChoices):
|
||||
mkdir = "mkdir", _("Mkdir")
|
||||
rmdir = "rmdir", _("Rmdir")
|
||||
delete = "delete", _("Delete")
|
||||
upload = "upload", _("Upload")
|
||||
rename = "rename", _("Rename")
|
||||
symlink = "symlink", _("Symlink")
|
||||
download = "download", _("Download")
|
||||
|
||||
|
||||
class ActionChoices(TextChoices):
|
||||
view = "view", _("View")
|
||||
update = "update", _("Update")
|
||||
delete = "delete", _("Delete")
|
||||
create = "create", _("Create")
|
||||
|
||||
|
||||
class LoginTypeChoices(TextChoices):
|
||||
web = "W", _("Web")
|
||||
terminal = "T", _("Terminal")
|
||||
unknown = "U", _("Unknown")
|
||||
|
||||
|
||||
class MFAChoices(IntegerChoices):
|
||||
disabled = 0, _("Disabled")
|
||||
enabled = 1, _("Enabled")
|
||||
unknown = 2, _("-")
|
||||
|
||||
|
||||
class LoginStatusChoices(IntegerChoices):
|
||||
success = True, _("Success")
|
||||
failed = False, _("Failed")
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-11 11:19
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0014_auto_20220505_1902'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='ftplog',
|
||||
name='operate',
|
||||
field=models.CharField(choices=[('mkdir', 'Mkdir'), ('rmdir', 'Rmdir'), ('delete', 'Delete'), ('upload', 'Upload'), ('rename', 'Rename'), ('symlink', 'Symlink'), ('download', 'Download')], max_length=16, verbose_name='Operate'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='operatelog',
|
||||
name='action',
|
||||
field=models.CharField(choices=[('view', 'View'), ('update', 'Update'), ('delete', 'Delete'), ('create', 'Create')], max_length=16, verbose_name='Action'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='userloginlog',
|
||||
name='status',
|
||||
field=models.BooleanField(choices=[(1, 'Success'), (0, 'Failed')], default=1, verbose_name='Status'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.16 on 2022-11-30 07:36
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0015_auto_20221011_1745'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='userloginlog',
|
||||
name='type',
|
||||
field=models.CharField(choices=[('web_cli', 'Web Client'), ('web_gui', 'Web GUI'), ('db_cli', 'DB Client'), ('db_gui', 'DB GUI'), ('rdp_cli', 'RDP Client'), ('rdp_file', 'RDP File'), ('ssh_cli', 'SSH Client'), ('web_sftp', 'Web SFTP')], max_length=128, verbose_name='Login type'),
|
||||
),
|
||||
]
|
|
@ -8,63 +8,55 @@ from common.utils import lazyproperty
|
|||
|
||||
from orgs.mixins.models import OrgModelMixin, Organization
|
||||
from orgs.utils import current_org
|
||||
from .const import (
|
||||
OperateChoices,
|
||||
ActionChoices,
|
||||
LoginTypeChoices,
|
||||
MFAChoices,
|
||||
LoginStatusChoices,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'FTPLog', 'OperateLog', 'PasswordChangeLog', 'UserLoginLog',
|
||||
"FTPLog",
|
||||
"OperateLog",
|
||||
"PasswordChangeLog",
|
||||
"UserLoginLog",
|
||||
]
|
||||
|
||||
|
||||
class FTPLog(OrgModelMixin):
|
||||
OPERATE_DELETE = 'Delete'
|
||||
OPERATE_UPLOAD = 'Upload'
|
||||
OPERATE_DOWNLOAD = 'Download'
|
||||
OPERATE_RMDIR = 'Rmdir'
|
||||
OPERATE_RENAME = 'Rename'
|
||||
OPERATE_MKDIR = 'Mkdir'
|
||||
OPERATE_SYMLINK = 'Symlink'
|
||||
|
||||
OPERATE_CHOICES = (
|
||||
(OPERATE_DELETE, _('Delete')),
|
||||
(OPERATE_UPLOAD, _('Upload')),
|
||||
(OPERATE_DOWNLOAD, _('Download')),
|
||||
(OPERATE_RMDIR, _('Rmdir')),
|
||||
(OPERATE_RENAME, _('Rename')),
|
||||
(OPERATE_MKDIR, _('Mkdir')),
|
||||
(OPERATE_SYMLINK, _('Symlink'))
|
||||
)
|
||||
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
user = models.CharField(max_length=128, verbose_name=_('User'))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
user = models.CharField(max_length=128, verbose_name=_("User"))
|
||||
remote_addr = models.CharField(
|
||||
max_length=128, verbose_name=_("Remote addr"), blank=True, null=True
|
||||
)
|
||||
asset = models.CharField(max_length=1024, verbose_name=_("Asset"))
|
||||
system_user = models.CharField(max_length=128, verbose_name=_("System user"))
|
||||
operate = models.CharField(max_length=16, verbose_name=_("Operate"), choices=OPERATE_CHOICES)
|
||||
operate = models.CharField(
|
||||
max_length=16, verbose_name=_("Operate"), choices=OperateChoices.choices
|
||||
)
|
||||
filename = models.CharField(max_length=1024, verbose_name=_("Filename"))
|
||||
is_success = models.BooleanField(default=True, verbose_name=_("Success"))
|
||||
date_start = models.DateTimeField(auto_now_add=True, verbose_name=_('Date start'))
|
||||
date_start = models.DateTimeField(auto_now_add=True, verbose_name=_("Date start"))
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("File transfer log")
|
||||
|
||||
|
||||
class OperateLog(OrgModelMixin):
|
||||
ACTION_CREATE = 'create'
|
||||
ACTION_VIEW = 'view'
|
||||
ACTION_UPDATE = 'update'
|
||||
ACTION_DELETE = 'delete'
|
||||
ACTION_CHOICES = (
|
||||
(ACTION_CREATE, _("Create")),
|
||||
(ACTION_VIEW, _("View")),
|
||||
(ACTION_UPDATE, _("Update")),
|
||||
(ACTION_DELETE, _("Delete"))
|
||||
)
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
user = models.CharField(max_length=128, verbose_name=_('User'))
|
||||
action = models.CharField(max_length=16, choices=ACTION_CHOICES, verbose_name=_("Action"))
|
||||
user = models.CharField(max_length=128, verbose_name=_("User"))
|
||||
action = models.CharField(
|
||||
max_length=16, choices=ActionChoices.choices, verbose_name=_("Action")
|
||||
)
|
||||
resource_type = models.CharField(max_length=64, verbose_name=_("Resource Type"))
|
||||
resource = models.CharField(max_length=128, verbose_name=_("Resource"))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
|
||||
remote_addr = models.CharField(
|
||||
max_length=128, verbose_name=_("Remote addr"), blank=True, null=True
|
||||
)
|
||||
datetime = models.DateTimeField(
|
||||
auto_now=True, verbose_name=_("Datetime"), db_index=True
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return "<{}> {} <{}>".format(self.user, self.action, self.resource)
|
||||
|
@ -84,50 +76,48 @@ class OperateLog(OrgModelMixin):
|
|||
|
||||
class PasswordChangeLog(models.Model):
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
user = models.CharField(max_length=128, verbose_name=_('User'))
|
||||
user = models.CharField(max_length=128, verbose_name=_("User"))
|
||||
change_by = models.CharField(max_length=128, verbose_name=_("Change by"))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'))
|
||||
remote_addr = models.CharField(
|
||||
max_length=128, verbose_name=_("Remote addr"), blank=True, null=True
|
||||
)
|
||||
datetime = models.DateTimeField(auto_now=True, verbose_name=_("Datetime"))
|
||||
|
||||
def __str__(self):
|
||||
return "{} change {}'s password".format(self.change_by, self.user)
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Password change log')
|
||||
verbose_name = _("Password change log")
|
||||
|
||||
|
||||
class UserLoginLog(models.Model):
|
||||
LOGIN_TYPE_CHOICE = (
|
||||
('W', 'Web'),
|
||||
('T', 'Terminal'),
|
||||
('U', 'Unknown'),
|
||||
)
|
||||
|
||||
MFA_DISABLED = 0
|
||||
MFA_ENABLED = 1
|
||||
MFA_UNKNOWN = 2
|
||||
|
||||
MFA_CHOICE = (
|
||||
(MFA_DISABLED, _('Disabled')),
|
||||
(MFA_ENABLED, _('Enabled')),
|
||||
(MFA_UNKNOWN, _('-')),
|
||||
)
|
||||
|
||||
STATUS_CHOICE = (
|
||||
(True, _('Success')),
|
||||
(False, _('Failed'))
|
||||
)
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
username = models.CharField(max_length=128, verbose_name=_('Username'))
|
||||
type = models.CharField(choices=LOGIN_TYPE_CHOICE, max_length=2, verbose_name=_('Login type'))
|
||||
ip = models.GenericIPAddressField(verbose_name=_('Login ip'))
|
||||
city = models.CharField(max_length=254, blank=True, null=True, verbose_name=_('Login city'))
|
||||
user_agent = models.CharField(max_length=254, blank=True, null=True, verbose_name=_('User agent'))
|
||||
mfa = models.SmallIntegerField(default=MFA_UNKNOWN, choices=MFA_CHOICE, verbose_name=_('MFA'))
|
||||
reason = models.CharField(default='', max_length=128, blank=True, verbose_name=_('Reason'))
|
||||
status = models.BooleanField(max_length=2, default=True, choices=STATUS_CHOICE, verbose_name=_('Status'))
|
||||
datetime = models.DateTimeField(default=timezone.now, verbose_name=_('Date login'))
|
||||
backend = models.CharField(max_length=32, default='', verbose_name=_('Authentication backend'))
|
||||
username = models.CharField(max_length=128, verbose_name=_("Username"))
|
||||
type = models.CharField(
|
||||
choices=LoginTypeChoices.choices, max_length=2, verbose_name=_("Login type")
|
||||
)
|
||||
ip = models.GenericIPAddressField(verbose_name=_("Login ip"))
|
||||
city = models.CharField(
|
||||
max_length=254, blank=True, null=True, verbose_name=_("Login city")
|
||||
)
|
||||
user_agent = models.CharField(
|
||||
max_length=254, blank=True, null=True, verbose_name=_("User agent")
|
||||
)
|
||||
mfa = models.SmallIntegerField(
|
||||
default=MFAChoices.unknown, choices=MFAChoices.choices, verbose_name=_("MFA")
|
||||
)
|
||||
reason = models.CharField(
|
||||
default="", max_length=128, blank=True, verbose_name=_("Reason")
|
||||
)
|
||||
status = models.BooleanField(
|
||||
default=LoginStatusChoices.success,
|
||||
choices=LoginStatusChoices.choices,
|
||||
verbose_name=_("Status"),
|
||||
)
|
||||
datetime = models.DateTimeField(default=timezone.now, verbose_name=_("Date login"))
|
||||
backend = models.CharField(
|
||||
max_length=32, default="", verbose_name=_("Authentication backend")
|
||||
)
|
||||
|
||||
@property
|
||||
def backend_display(self):
|
||||
|
@ -137,8 +127,8 @@ class UserLoginLog(models.Model):
|
|||
def get_login_logs(cls, date_from=None, date_to=None, user=None, keyword=None):
|
||||
login_logs = cls.objects.all()
|
||||
if date_from and date_to:
|
||||
date_from = "{} {}".format(date_from, '00:00:00')
|
||||
date_to = "{} {}".format(date_to, '23:59:59')
|
||||
date_from = "{} {}".format(date_from, "00:00:00")
|
||||
date_to = "{} {}".format(date_to, "23:59:59")
|
||||
login_logs = login_logs.filter(
|
||||
datetime__gte=date_from, datetime__lte=date_to
|
||||
)
|
||||
|
@ -146,18 +136,19 @@ class UserLoginLog(models.Model):
|
|||
login_logs = login_logs.filter(username=user)
|
||||
if keyword:
|
||||
login_logs = login_logs.filter(
|
||||
Q(ip__contains=keyword) |
|
||||
Q(city__contains=keyword) |
|
||||
Q(username__contains=keyword)
|
||||
Q(ip__contains=keyword)
|
||||
| Q(city__contains=keyword)
|
||||
| Q(username__contains=keyword)
|
||||
)
|
||||
if not current_org.is_root():
|
||||
username_list = current_org.get_members().values_list('username', flat=True)
|
||||
username_list = current_org.get_members().values_list("username", flat=True)
|
||||
login_logs = login_logs.filter(username__in=username_list)
|
||||
return login_logs
|
||||
|
||||
@property
|
||||
def reason_display(self):
|
||||
from authentication.errors import reason_choices, old_reason_choices
|
||||
|
||||
reason = reason_choices.get(self.reason)
|
||||
if reason:
|
||||
return reason
|
||||
|
@ -165,5 +156,5 @@ class UserLoginLog(models.Model):
|
|||
return reason
|
||||
|
||||
class Meta:
|
||||
ordering = ['-datetime', 'username']
|
||||
verbose_name = _('User login log')
|
||||
ordering = ["-datetime", "username"]
|
||||
verbose_name = _("User login log")
|
||||
|
|
|
@ -3,77 +3,99 @@
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.serializers import BulkSerializerMixin
|
||||
from common.drf.fields import LabeledChoiceField
|
||||
from terminal.models import Session
|
||||
from . import models
|
||||
from .const import (
|
||||
ActionChoices,
|
||||
OperateChoices,
|
||||
MFAChoices,
|
||||
LoginStatusChoices,
|
||||
LoginTypeChoices,
|
||||
)
|
||||
|
||||
|
||||
class FTPLogSerializer(serializers.ModelSerializer):
|
||||
operate_display = serializers.ReadOnlyField(source='get_operate_display', label=_('Operate display'))
|
||||
operate = LabeledChoiceField(choices=OperateChoices.choices, label=_("Operate"))
|
||||
|
||||
class Meta:
|
||||
model = models.FTPLog
|
||||
fields_mini = ['id']
|
||||
fields_mini = ["id"]
|
||||
fields_small = fields_mini + [
|
||||
'user', 'remote_addr', 'asset', 'system_user', 'org_id',
|
||||
'operate', 'filename', 'operate_display',
|
||||
'is_success',
|
||||
'date_start',
|
||||
"user",
|
||||
"remote_addr",
|
||||
"asset",
|
||||
"system_user",
|
||||
"org_id",
|
||||
"operate",
|
||||
"filename",
|
||||
"is_success",
|
||||
"date_start",
|
||||
]
|
||||
fields = fields_small
|
||||
|
||||
|
||||
class UserLoginLogSerializer(serializers.ModelSerializer):
|
||||
type_display = serializers.ReadOnlyField(source='get_type_display', label=_('Type display'))
|
||||
status_display = serializers.ReadOnlyField(source='get_status_display', label=_('Status display'))
|
||||
mfa_display = serializers.ReadOnlyField(source='get_mfa_display', label=_('MFA display'))
|
||||
mfa = LabeledChoiceField(choices=MFAChoices.choices, label=_("MFA"))
|
||||
type = LabeledChoiceField(choices=LoginTypeChoices.choices, label=_("Type"))
|
||||
status = LabeledChoiceField(choices=LoginStatusChoices.choices, label=_("Status"))
|
||||
|
||||
class Meta:
|
||||
model = models.UserLoginLog
|
||||
fields_mini = ['id']
|
||||
fields_mini = ["id"]
|
||||
fields_small = fields_mini + [
|
||||
'username', 'type', 'type_display', 'ip', 'city', 'user_agent',
|
||||
'mfa', 'mfa_display', 'reason', 'reason_display', 'backend', 'backend_display',
|
||||
'status', 'status_display',
|
||||
'datetime',
|
||||
"username",
|
||||
"type",
|
||||
"ip",
|
||||
"city",
|
||||
"user_agent",
|
||||
"mfa",
|
||||
"reason",
|
||||
"reason_display",
|
||||
"backend",
|
||||
"backend_display",
|
||||
"status",
|
||||
"datetime",
|
||||
]
|
||||
fields = fields_small
|
||||
extra_kwargs = {
|
||||
"user_agent": {'label': _('User agent')},
|
||||
"reason_display": {'label': _('Reason display')},
|
||||
'backend_display': {'label': _('Authentication backend')}
|
||||
"user_agent": {"label": _("User agent")},
|
||||
"reason_display": {"label": _("Reason display")},
|
||||
"backend_display": {"label": _("Authentication backend")},
|
||||
}
|
||||
|
||||
|
||||
class OperateLogSerializer(serializers.ModelSerializer):
|
||||
action_display = serializers.CharField(source='get_action_display', label=_('Action'))
|
||||
action = LabeledChoiceField(choices=ActionChoices.choices, label=_("Action"))
|
||||
|
||||
class Meta:
|
||||
model = models.OperateLog
|
||||
fields_mini = ['id']
|
||||
fields_mini = ["id"]
|
||||
fields_small = fields_mini + [
|
||||
'user', 'action', 'action_display',
|
||||
'resource_type', 'resource_type_display', 'resource',
|
||||
'remote_addr', 'datetime', 'org_id'
|
||||
"user",
|
||||
"action",
|
||||
"resource_type",
|
||||
"resource_type_display",
|
||||
"resource",
|
||||
"remote_addr",
|
||||
"datetime",
|
||||
"org_id",
|
||||
]
|
||||
fields = fields_small
|
||||
extra_kwargs = {
|
||||
'resource_type_display': {'label': _('Resource Type')}
|
||||
}
|
||||
extra_kwargs = {"resource_type_display": {"label": _("Resource Type")}}
|
||||
|
||||
|
||||
class PasswordChangeLogSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.PasswordChangeLog
|
||||
fields = (
|
||||
'id', 'user', 'change_by', 'remote_addr', 'datetime'
|
||||
)
|
||||
fields = ("id", "user", "change_by", "remote_addr", "datetime")
|
||||
|
||||
|
||||
class SessionAuditSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Session
|
||||
fields = '__all__'
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
#
|
||||
# class CommandExecutionSerializer(serializers.ModelSerializer):
|
||||
|
|
|
@ -1,38 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import time
|
||||
|
||||
from django.db.models.signals import (
|
||||
post_save, m2m_changed, pre_delete
|
||||
)
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django.dispatch import receiver
|
||||
from django.utils import timezone, translation
|
||||
from django.utils.functional import LazyObject
|
||||
from django.contrib.auth import BACKEND_SESSION_KEY
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils import translation
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
from django.db.models.signals import post_save, m2m_changed, pre_delete
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
|
||||
from assets.models import Asset
|
||||
from authentication.signals import post_auth_failed, post_auth_success
|
||||
from authentication.utils import check_different_city_login_if_need
|
||||
from jumpserver.utils import current_request
|
||||
from users.models import User
|
||||
from users.signals import post_user_change_password
|
||||
from terminal.models import Session, Command
|
||||
from .utils import write_login_log, create_operate_log
|
||||
from . import models, serializers
|
||||
from .models import OperateLog
|
||||
from orgs.utils import current_org
|
||||
from perms.models import AssetPermission
|
||||
from terminal.backends.command.serializers import SessionCommandSerializer
|
||||
from users.models import User
|
||||
from users.signals import post_user_change_password
|
||||
from assets.models import Asset
|
||||
from jumpserver.utils import current_request
|
||||
from authentication.signals import post_auth_failed, post_auth_success
|
||||
from authentication.utils import check_different_city_login_if_need
|
||||
from terminal.models import Session, Command
|
||||
from terminal.serializers import SessionSerializer
|
||||
from terminal.backends.command.serializers import SessionCommandSerializer
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR
|
||||
from common.utils import get_request_ip, get_logger, get_syslogger
|
||||
from common.utils.encode import data_to_json
|
||||
from . import models, serializers
|
||||
from .const import ActionChoices
|
||||
from .utils import write_login_log, create_operate_log
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
sys_logger = get_syslogger(__name__)
|
||||
|
@ -46,14 +42,14 @@ class AuthBackendLabelMapping(LazyObject):
|
|||
for source, backends in User.SOURCE_BACKEND_MAPPING.items():
|
||||
for backend in backends:
|
||||
backend_label_mapping[backend] = source.label
|
||||
backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _('SSH Key')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _('Password')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_SSO] = _('SSO')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _('Auth Token')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _('WeCom')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _('FeiShu')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _('DingTalk')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _('Temporary token')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _("SSH Key")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _("Password")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_SSO] = _("SSO")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _("Auth Token")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _("WeCom")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _("FeiShu")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _("DingTalk")
|
||||
backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _("Temporary token")
|
||||
return backend_label_mapping
|
||||
|
||||
def _setup(self):
|
||||
|
@ -65,41 +61,41 @@ AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()
|
|||
|
||||
M2M_NEED_RECORD = {
|
||||
User.groups.through.__name__: (
|
||||
_('User and Group'),
|
||||
_('{User} JOINED {UserGroup}'),
|
||||
_('{User} LEFT {UserGroup}')
|
||||
_("User and Group"),
|
||||
_("{User} JOINED {UserGroup}"),
|
||||
_("{User} LEFT {UserGroup}"),
|
||||
),
|
||||
Asset.nodes.through.__name__: (
|
||||
_('Node and Asset'),
|
||||
_('{Node} ADD {Asset}'),
|
||||
_('{Node} REMOVE {Asset}')
|
||||
_("Node and Asset"),
|
||||
_("{Node} ADD {Asset}"),
|
||||
_("{Node} REMOVE {Asset}"),
|
||||
),
|
||||
AssetPermission.users.through.__name__: (
|
||||
_('User asset permissions'),
|
||||
_('{AssetPermission} ADD {User}'),
|
||||
_('{AssetPermission} REMOVE {User}'),
|
||||
_("User asset permissions"),
|
||||
_("{AssetPermission} ADD {User}"),
|
||||
_("{AssetPermission} REMOVE {User}"),
|
||||
),
|
||||
AssetPermission.user_groups.through.__name__: (
|
||||
_('User group asset permissions'),
|
||||
_('{AssetPermission} ADD {UserGroup}'),
|
||||
_('{AssetPermission} REMOVE {UserGroup}'),
|
||||
_("User group asset permissions"),
|
||||
_("{AssetPermission} ADD {UserGroup}"),
|
||||
_("{AssetPermission} REMOVE {UserGroup}"),
|
||||
),
|
||||
AssetPermission.assets.through.__name__: (
|
||||
_('Asset permission'),
|
||||
_('{AssetPermission} ADD {Asset}'),
|
||||
_('{AssetPermission} REMOVE {Asset}'),
|
||||
_("Asset permission"),
|
||||
_("{AssetPermission} ADD {Asset}"),
|
||||
_("{AssetPermission} REMOVE {Asset}"),
|
||||
),
|
||||
AssetPermission.nodes.through.__name__: (
|
||||
_('Node permission'),
|
||||
_('{AssetPermission} ADD {Node}'),
|
||||
_('{AssetPermission} REMOVE {Node}'),
|
||||
_("Node permission"),
|
||||
_("{AssetPermission} ADD {Node}"),
|
||||
_("{AssetPermission} REMOVE {Node}"),
|
||||
),
|
||||
}
|
||||
|
||||
M2M_ACTION_MAPER = {
|
||||
POST_ADD: OperateLog.ACTION_CREATE,
|
||||
POST_REMOVE: OperateLog.ACTION_DELETE,
|
||||
POST_CLEAR: OperateLog.ACTION_DELETE,
|
||||
POST_ADD: ActionChoices.create,
|
||||
POST_REMOVE: ActionChoices.delete,
|
||||
POST_CLEAR: ActionChoices.delete,
|
||||
}
|
||||
|
||||
|
||||
|
@ -117,12 +113,14 @@ def on_m2m_changed(sender, action, instance, model, pk_set, **kwargs):
|
|||
org_id = current_org.id
|
||||
remote_addr = get_request_ip(current_request)
|
||||
user = str(user)
|
||||
resource_type, resource_tmpl_add, resource_tmpl_remove = M2M_NEED_RECORD[sender_name]
|
||||
resource_type, resource_tmpl_add, resource_tmpl_remove = M2M_NEED_RECORD[
|
||||
sender_name
|
||||
]
|
||||
|
||||
action = M2M_ACTION_MAPER[action]
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
if action == ActionChoices.create:
|
||||
resource_tmpl = resource_tmpl_add
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
elif action == ActionChoices.delete:
|
||||
resource_tmpl = resource_tmpl_remove
|
||||
else:
|
||||
return
|
||||
|
@ -139,41 +137,53 @@ def on_m2m_changed(sender, action, instance, model, pk_set, **kwargs):
|
|||
|
||||
print("Instace name: ", instance_name, instance_value)
|
||||
for obj in objs:
|
||||
resource = resource_tmpl.format(**{
|
||||
instance_name: instance_value,
|
||||
model_name: str(obj)
|
||||
})[:128] # `resource` 字段只有 128 个字符长 😔
|
||||
resource = resource_tmpl.format(
|
||||
**{instance_name: instance_value, model_name: str(obj)}
|
||||
)[
|
||||
:128
|
||||
] # `resource` 字段只有 128 个字符长 😔
|
||||
|
||||
to_create.append(OperateLog(
|
||||
user=user, action=action, resource_type=resource_type,
|
||||
resource=resource, remote_addr=remote_addr, org_id=org_id
|
||||
))
|
||||
OperateLog.objects.bulk_create(to_create)
|
||||
to_create.append(
|
||||
models.OperateLog(
|
||||
user=user,
|
||||
action=action,
|
||||
resource_type=resource_type,
|
||||
resource=resource,
|
||||
remote_addr=remote_addr,
|
||||
org_id=org_id,
|
||||
)
|
||||
)
|
||||
models.OperateLog.objects.bulk_create(to_create)
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
|
||||
def on_object_created_or_update(
|
||||
sender, instance=None, created=False, update_fields=None, **kwargs
|
||||
):
|
||||
# last_login 改变是最后登录日期, 每次登录都会改变
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
if (
|
||||
instance._meta.object_name == "User"
|
||||
and update_fields
|
||||
and "last_login" in update_fields
|
||||
):
|
||||
return
|
||||
if created:
|
||||
action = models.OperateLog.ACTION_CREATE
|
||||
action = ActionChoices.create
|
||||
else:
|
||||
action = models.OperateLog.ACTION_UPDATE
|
||||
action = ActionChoices.update
|
||||
create_operate_log(action, sender, instance)
|
||||
|
||||
|
||||
@receiver(pre_delete)
|
||||
def on_object_delete(sender, instance=None, **kwargs):
|
||||
create_operate_log(models.OperateLog.ACTION_DELETE, sender, instance)
|
||||
create_operate_log(ActionChoices.delete, sender, instance)
|
||||
|
||||
|
||||
@receiver(post_user_change_password, sender=User)
|
||||
def on_user_change_password(sender, user=None, **kwargs):
|
||||
if not current_request:
|
||||
remote_addr = '127.0.0.1'
|
||||
change_by = 'System'
|
||||
remote_addr = "127.0.0.1"
|
||||
change_by = "System"
|
||||
else:
|
||||
remote_addr = get_request_ip(current_request)
|
||||
if not current_request.user.is_authenticated:
|
||||
|
@ -182,7 +192,8 @@ def on_user_change_password(sender, user=None, **kwargs):
|
|||
change_by = str(current_request.user)
|
||||
with transaction.atomic():
|
||||
models.PasswordChangeLog.objects.create(
|
||||
user=str(user), change_by=change_by,
|
||||
user=str(user),
|
||||
change_by=change_by,
|
||||
remote_addr=remote_addr,
|
||||
)
|
||||
|
||||
|
@ -216,51 +227,52 @@ def on_audits_log_create(sender, instance=None, **kwargs):
|
|||
|
||||
|
||||
def get_login_backend(request):
|
||||
backend = request.session.get('auth_backend', '') or \
|
||||
request.session.get(BACKEND_SESSION_KEY, '')
|
||||
backend = request.session.get("auth_backend", "") or request.session.get(
|
||||
BACKEND_SESSION_KEY, ""
|
||||
)
|
||||
|
||||
backend_label = AUTH_BACKEND_LABEL_MAPPING.get(backend, None)
|
||||
if backend_label is None:
|
||||
backend_label = ''
|
||||
backend_label = ""
|
||||
return backend_label
|
||||
|
||||
|
||||
def generate_data(username, request, login_type=None):
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', '')
|
||||
login_ip = get_request_ip(request) or '0.0.0.0'
|
||||
user_agent = request.META.get("HTTP_USER_AGENT", "")
|
||||
login_ip = get_request_ip(request) or "0.0.0.0"
|
||||
|
||||
if login_type is None and isinstance(request, Request):
|
||||
login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', 'U')
|
||||
login_type = request.META.get("HTTP_X_JMS_LOGIN_TYPE", "U")
|
||||
if login_type is None:
|
||||
login_type = 'W'
|
||||
login_type = "W"
|
||||
|
||||
with translation.override('en'):
|
||||
with translation.override("en"):
|
||||
backend = str(get_login_backend(request))
|
||||
|
||||
data = {
|
||||
'username': username,
|
||||
'ip': login_ip,
|
||||
'type': login_type,
|
||||
'user_agent': user_agent[0:254],
|
||||
'datetime': timezone.now(),
|
||||
'backend': backend,
|
||||
"username": username,
|
||||
"ip": login_ip,
|
||||
"type": login_type,
|
||||
"user_agent": user_agent[0:254],
|
||||
"datetime": timezone.now(),
|
||||
"backend": backend,
|
||||
}
|
||||
return data
|
||||
|
||||
|
||||
@receiver(post_auth_success)
|
||||
def on_user_auth_success(sender, user, request, login_type=None, **kwargs):
|
||||
logger.debug('User login success: {}'.format(user.username))
|
||||
logger.debug("User login success: {}".format(user.username))
|
||||
check_different_city_login_if_need(user, request)
|
||||
data = generate_data(user.username, request, login_type=login_type)
|
||||
request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.update({'mfa': int(user.mfa_enabled), 'status': True})
|
||||
request.session["login_time"] = data["datetime"].strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.update({"mfa": int(user.mfa_enabled), "status": True})
|
||||
write_login_log(**data)
|
||||
|
||||
|
||||
@receiver(post_auth_failed)
|
||||
def on_user_auth_failed(sender, username, request, reason='', **kwargs):
|
||||
logger.debug('User login failed: {}'.format(username))
|
||||
def on_user_auth_failed(sender, username, request, reason="", **kwargs):
|
||||
logger.debug("User login failed: {}".format(username))
|
||||
data = generate_data(username, request)
|
||||
data.update({'reason': reason[:128], 'status': False})
|
||||
data.update({"reason": reason[:128], "status": False})
|
||||
write_login_log(**data)
|
||||
|
|
|
@ -1,33 +1,32 @@
|
|||
import os
|
||||
import abc
|
||||
import json
|
||||
import time
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import urllib.parse
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.request import Request
|
||||
from django.utils import timezone
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
|
||||
from common.drf.api import JMSModelViewSet
|
||||
from common.http import is_true
|
||||
from orgs.mixins.api import RootOrgViewMixin
|
||||
from perms.models import Action
|
||||
from orgs.utils import tmp_to_root_org
|
||||
from perms.models import ActionChoices
|
||||
from terminal.models import EndpointRule
|
||||
from ..models import ConnectionToken
|
||||
from ..serializers import (
|
||||
ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
|
||||
SuperConnectionTokenSerializer, ConnectionTokenDisplaySerializer,
|
||||
)
|
||||
from ..models import ConnectionToken
|
||||
|
||||
__all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']
|
||||
|
||||
# ExtraActionApiMixin
|
||||
|
||||
|
||||
class RDPFileClientProtocolURLMixin:
|
||||
request: Request
|
||||
|
@ -70,8 +69,7 @@ class RDPFileClientProtocolURLMixin:
|
|||
# 设置磁盘挂载
|
||||
drives_redirect = is_true(self.request.query_params.get('drives_redirect'))
|
||||
if drives_redirect:
|
||||
actions = Action.choices_to_value(token.actions)
|
||||
if actions & Action.UPDOWNLOAD == Action.UPDOWNLOAD:
|
||||
if ActionChoices.contains(token.actions, ActionChoices.transfer()):
|
||||
rdp_options['drivestoredirect:s'] = '*'
|
||||
|
||||
# 设置全屏
|
||||
|
@ -179,22 +177,10 @@ class ExtraActionApiMixin(RDPFileClientProtocolURLMixin):
|
|||
get_serializer: callable
|
||||
perform_create: callable
|
||||
|
||||
@action(methods=['POST'], detail=False, url_path='secret-info/detail')
|
||||
def get_secret_detail(self, request, *args, **kwargs):
|
||||
""" 非常重要的 api, 在逻辑层再判断一下 rbac 权限, 双重保险 """
|
||||
rbac_perm = 'authentication.view_connectiontokensecret'
|
||||
if not request.user.has_perm(rbac_perm):
|
||||
raise PermissionDenied('Not allow to view secret')
|
||||
token_id = request.data.get('token') or ''
|
||||
token = get_object_or_404(ConnectionToken, pk=token_id)
|
||||
self.check_token_permission(token)
|
||||
serializer = self.get_serializer(instance=token)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@action(methods=['POST', 'GET'], detail=False, url_path='rdp/file')
|
||||
def get_rdp_file(self, request, *args, **kwargs):
|
||||
token = self.create_connection_token()
|
||||
self.check_token_permission(token)
|
||||
token.is_valid()
|
||||
filename, content = self.get_rdp_file_info(token)
|
||||
filename = '{}.rdp'.format(filename)
|
||||
response = HttpResponse(content, content_type='application/octet-stream')
|
||||
|
@ -204,7 +190,7 @@ class ExtraActionApiMixin(RDPFileClientProtocolURLMixin):
|
|||
@action(methods=['POST', 'GET'], detail=False, url_path='client-url')
|
||||
def get_client_protocol_url(self, request, *args, **kwargs):
|
||||
token = self.create_connection_token()
|
||||
self.check_token_permission(token)
|
||||
token.is_valid()
|
||||
try:
|
||||
protocol_data = self.get_client_protocol_data(token)
|
||||
except ValueError as e:
|
||||
|
@ -222,12 +208,6 @@ class ExtraActionApiMixin(RDPFileClientProtocolURLMixin):
|
|||
instance.expire()
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@staticmethod
|
||||
def check_token_permission(token: ConnectionToken):
|
||||
is_valid, error = token.check_permission()
|
||||
if not is_valid:
|
||||
raise PermissionDenied(error)
|
||||
|
||||
def create_connection_token(self):
|
||||
data = self.request.query_params if self.request.method == 'GET' else self.request.data
|
||||
serializer = self.get_serializer(data=data)
|
||||
|
@ -257,6 +237,22 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
|
|||
'get_client_protocol_url': 'authentication.add_connectiontoken',
|
||||
}
|
||||
|
||||
@action(methods=['POST'], detail=False, url_path='secret')
|
||||
def get_secret_detail(self, request, *args, **kwargs):
|
||||
""" 非常重要的 api, 在逻辑层再判断一下 rbac 权限, 双重保险 """
|
||||
rbac_perm = 'authentication.view_connectiontokensecret'
|
||||
if not request.user.has_perm(rbac_perm):
|
||||
raise PermissionDenied('Not allow to view secret')
|
||||
token_id = request.data.get('token') or ''
|
||||
token = get_object_or_404(ConnectionToken, pk=token_id)
|
||||
token.is_valid()
|
||||
serializer = self.get_serializer(instance=token)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with tmp_to_root_org():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def get_queryset(self):
|
||||
return ConnectionToken.objects.filter(user=self.request.user)
|
||||
|
||||
|
@ -264,25 +260,36 @@ class ConnectionTokenViewSet(ExtraActionApiMixin, RootOrgViewMixin, JMSModelView
|
|||
return self.request.user
|
||||
|
||||
def perform_create(self, serializer):
|
||||
user = self.get_user(serializer)
|
||||
asset = serializer.validated_data.get('asset')
|
||||
account_username = serializer.validated_data.get('account_username')
|
||||
self.validate_asset_permission(user, asset, account_username)
|
||||
return super(ConnectionTokenViewSet, self).perform_create(serializer)
|
||||
self.validate_serializer(serializer)
|
||||
return super().perform_create(serializer)
|
||||
|
||||
@staticmethod
|
||||
def validate_asset_permission(user, asset, account_username):
|
||||
def validate_serializer(self, serializer):
|
||||
from perms.utils.account import PermAccountUtil
|
||||
actions, expire_at = PermAccountUtil().validate_permission(user, asset, account_username)
|
||||
if not actions:
|
||||
error = 'No actions'
|
||||
raise PermissionDenied(error)
|
||||
if expire_at < time.time():
|
||||
error = 'Expired'
|
||||
raise PermissionDenied(error)
|
||||
|
||||
data = serializer.validated_data
|
||||
user = self.get_user(serializer)
|
||||
asset = data.get('asset')
|
||||
login = data.get('login')
|
||||
data['org_id'] = asset.org_id
|
||||
data['user'] = user
|
||||
|
||||
# SuperConnectionToken
|
||||
util = PermAccountUtil()
|
||||
permed_account = util.validate_permission(user, asset, login)
|
||||
|
||||
if not permed_account or not permed_account.actions:
|
||||
msg = 'user `{}` not has asset `{}` permission for login `{}`'.format(
|
||||
user, asset, login
|
||||
)
|
||||
raise PermissionDenied(msg)
|
||||
|
||||
if permed_account.date_expired < timezone.now():
|
||||
raise PermissionDenied('Expired')
|
||||
|
||||
if permed_account.has_secret:
|
||||
data['secret'] = ''
|
||||
if permed_account.username != '@INPUT':
|
||||
data['username'] = ''
|
||||
return permed_account
|
||||
|
||||
|
||||
class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
|
||||
|
|
|
@ -16,7 +16,7 @@ def migrate_system_user_to_account(apps, schema_editor):
|
|||
count += len(connection_tokens)
|
||||
updated = []
|
||||
for connection_token in connection_tokens:
|
||||
connection_token.account_username = connection_token.system_user.username
|
||||
connection_token.account = connection_token.system_user.username
|
||||
updated.append(connection_token)
|
||||
connection_token_model.objects.bulk_update(updated, ['account_username'])
|
||||
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-22 13:52
|
||||
|
||||
import common.db.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('authentication', '0013_connectiontoken_protocol'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='connectiontoken',
|
||||
old_name='account_username',
|
||||
new_name='login'
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='connectiontoken',
|
||||
name='login',
|
||||
field=models.CharField(max_length=128, verbose_name='Login account'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='connectiontoken',
|
||||
name='username',
|
||||
field=models.CharField(default='', max_length=128, verbose_name='Username'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='connectiontoken',
|
||||
name='secret',
|
||||
field=common.db.fields.EncryptCharField(default='', max_length=128, verbose_name='Secret'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-23 02:26
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('authentication', '0014_auto_20221122_2152'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='connectiontoken',
|
||||
name='login',
|
||||
field=models.CharField(max_length=128, verbose_name='Login account'),
|
||||
),
|
||||
]
|
|
@ -2,13 +2,15 @@ import time
|
|||
from datetime import timedelta
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
|
||||
from django.db import models
|
||||
from common.utils import lazyproperty
|
||||
from django.conf import settings
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
from common.utils import lazyproperty, pretty_string
|
||||
from common.utils.timezone import as_current_tz
|
||||
from common.db.models import JMSBaseModel
|
||||
from common.db.fields import EncryptCharField
|
||||
from assets.const import Protocol
|
||||
|
||||
|
||||
|
@ -25,13 +27,14 @@ class ConnectionToken(OrgModelMixin, JMSBaseModel):
|
|||
'assets.Asset', on_delete=models.SET_NULL, null=True, blank=True,
|
||||
related_name='connection_tokens', verbose_name=_('Asset'),
|
||||
)
|
||||
login = models.CharField(max_length=128, verbose_name=_("Login account"))
|
||||
username = models.CharField(max_length=128, default='', verbose_name=_("Username"))
|
||||
secret = EncryptCharField(max_length=64, default='', verbose_name=_("Secret"))
|
||||
protocol = models.CharField(
|
||||
choices=Protocol.choices, max_length=16, default=Protocol.ssh, verbose_name=_("Protocol")
|
||||
)
|
||||
user_display = models.CharField(max_length=128, default='', verbose_name=_("User display"))
|
||||
asset_display = models.CharField(max_length=128, default='', verbose_name=_("Asset display"))
|
||||
account_username = models.CharField(max_length=128, default='', verbose_name=_("Account"))
|
||||
secret = models.CharField(max_length=64, default='', verbose_name=_("Secret"))
|
||||
date_expired = models.DateTimeField(
|
||||
default=date_expired_default, verbose_name=_("Date expired")
|
||||
)
|
||||
|
@ -43,10 +46,6 @@ class ConnectionToken(OrgModelMixin, JMSBaseModel):
|
|||
('view_connectiontokensecret', _('Can view connection token secret'))
|
||||
]
|
||||
|
||||
@property
|
||||
def is_valid(self):
|
||||
return not self.is_expired
|
||||
|
||||
@property
|
||||
def is_expired(self):
|
||||
return self.date_expired < timezone.now()
|
||||
|
@ -59,9 +58,10 @@ class ConnectionToken(OrgModelMixin, JMSBaseModel):
|
|||
seconds = 0
|
||||
return int(seconds)
|
||||
|
||||
@classmethod
|
||||
def get_default_date_expired(cls):
|
||||
return date_expired_default()
|
||||
def save(self, *args, **kwargs):
|
||||
self.asset_display = pretty_string(self.asset, max_length=128)
|
||||
self.user_display = pretty_string(self.user, max_length=128)
|
||||
return super().save(*args, **kwargs)
|
||||
|
||||
def expire(self):
|
||||
self.date_expired = timezone.now()
|
||||
|
@ -69,48 +69,74 @@ class ConnectionToken(OrgModelMixin, JMSBaseModel):
|
|||
|
||||
def renewal(self):
|
||||
""" 续期 Token,将来支持用户自定义创建 token 后,续期策略要修改 """
|
||||
self.date_expired = self.get_default_date_expired()
|
||||
self.date_expired = date_expired_default()
|
||||
self.save()
|
||||
|
||||
# actions 和 expired_at 在 check_valid() 中赋值
|
||||
actions = expire_at = None
|
||||
@lazyproperty
|
||||
def permed_account(self):
|
||||
from perms.utils import PermAccountUtil
|
||||
permed_account = PermAccountUtil().validate_permission(
|
||||
self.user, self.asset, self.login
|
||||
)
|
||||
return permed_account
|
||||
|
||||
def check_permission(self):
|
||||
from perms.utils.account import PermAccountUtil
|
||||
@lazyproperty
|
||||
def actions(self):
|
||||
return self.permed_account.actions
|
||||
|
||||
@lazyproperty
|
||||
def expire_at(self):
|
||||
return self.permed_account.date_expired.timestamp()
|
||||
|
||||
def is_valid(self):
|
||||
if self.is_expired:
|
||||
is_valid = False
|
||||
error = _('Connection token expired at: {}').format(as_current_tz(self.date_expired))
|
||||
return is_valid, error
|
||||
raise PermissionDenied(error)
|
||||
if not self.user or not self.user.is_valid:
|
||||
is_valid = False
|
||||
error = _('No user or invalid user')
|
||||
return is_valid, error
|
||||
raise PermissionDenied(error)
|
||||
if not self.asset or not self.asset.is_active:
|
||||
is_valid = False
|
||||
error = _('No asset or inactive asset')
|
||||
return is_valid, error
|
||||
if not self.account_username:
|
||||
is_valid = False
|
||||
if not self.login:
|
||||
error = _('No account')
|
||||
return is_valid, error
|
||||
actions, expire_at = PermAccountUtil().validate_permission(
|
||||
self.user, self.asset, self.account_username
|
||||
)
|
||||
if not actions or expire_at < time.time():
|
||||
is_valid = False
|
||||
error = _('User has no permission to access asset or permission expired')
|
||||
return is_valid, error
|
||||
self.actions = actions
|
||||
self.expire_at = expire_at
|
||||
is_valid, error = True, ''
|
||||
return is_valid, error
|
||||
raise PermissionDenied(error)
|
||||
|
||||
if not self.permed_account or not self.permed_account.actions:
|
||||
msg = 'user `{}` not has asset `{}` permission for login `{}`'.format(
|
||||
self.user, self.asset, self.login
|
||||
)
|
||||
raise PermissionDenied(msg)
|
||||
|
||||
if self.permed_account.date_expired < timezone.now():
|
||||
raise PermissionDenied('Expired')
|
||||
return True
|
||||
|
||||
@lazyproperty
|
||||
def platform(self):
|
||||
return self.asset.platform
|
||||
|
||||
@lazyproperty
|
||||
def account(self):
|
||||
if not self.asset:
|
||||
return None
|
||||
account = self.asset.accounts.filter(username=self.account_username).first()
|
||||
return account
|
||||
|
||||
account = self.asset.accounts.filter(name=self.login).first()
|
||||
if self.login == '@INPUT' or not account:
|
||||
return {
|
||||
'name': self.login,
|
||||
'username': self.username,
|
||||
'secret_type': 'password',
|
||||
'secret': self.secret
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'name': account.name,
|
||||
'username': account.username,
|
||||
'secret_type': account.secret_type,
|
||||
'secret': account.secret_type or self.secret
|
||||
}
|
||||
|
||||
@lazyproperty
|
||||
def domain(self):
|
||||
|
|
|
@ -1,14 +1,12 @@
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from orgs.mixins.serializers import OrgResourceModelSerializerMixin
|
||||
from assets.serializers import PlatformSerializer
|
||||
from assets.models import Asset, Domain, CommandFilterRule, Account, Platform
|
||||
from authentication.models import ConnectionToken
|
||||
from common.utils import pretty_string
|
||||
from common.utils.random import random_string
|
||||
from assets.models import Asset, Gateway, Domain, CommandFilterRule, Account
|
||||
from orgs.mixins.serializers import OrgResourceModelSerializerMixin
|
||||
from perms.serializers.permission import ActionChoicesField
|
||||
from users.models import User
|
||||
from perms.serializers.permission import ActionsField
|
||||
|
||||
|
||||
__all__ = [
|
||||
'ConnectionTokenSerializer', 'ConnectionTokenSecretSerializer',
|
||||
|
@ -17,23 +15,25 @@ __all__ = [
|
|||
|
||||
|
||||
class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
|
||||
is_valid = serializers.BooleanField(read_only=True, label=_('Validity'))
|
||||
username = serializers.CharField(max_length=128, label=_("Input username"),
|
||||
allow_null=True, allow_blank=True)
|
||||
expire_time = serializers.IntegerField(read_only=True, label=_('Expired time'))
|
||||
|
||||
class Meta:
|
||||
model = ConnectionToken
|
||||
fields_mini = ['id']
|
||||
fields_small = fields_mini + [
|
||||
'secret', 'account_username', 'date_expired',
|
||||
'date_created', 'date_updated',
|
||||
'created_by', 'updated_by', 'org_id', 'org_name',
|
||||
'protocol', 'login', 'secret', 'username',
|
||||
'actions', 'date_expired', 'date_created',
|
||||
'date_updated', 'created_by',
|
||||
'updated_by', 'org_id', 'org_name',
|
||||
]
|
||||
fields_fk = [
|
||||
'user', 'asset',
|
||||
]
|
||||
read_only_fields = [
|
||||
# 普通 Token 不支持指定 user
|
||||
'user', 'is_valid', 'expire_time',
|
||||
'user', 'expire_time',
|
||||
'user_display', 'asset_display',
|
||||
]
|
||||
fields = fields_small + fields_fk + read_only_fields
|
||||
|
@ -46,32 +46,6 @@ class ConnectionTokenSerializer(OrgResourceModelSerializerMixin):
|
|||
def get_user(self, attrs):
|
||||
return self.get_request_user()
|
||||
|
||||
def validate(self, attrs):
|
||||
fields_attrs = self.construct_internal_fields_attrs(attrs)
|
||||
attrs.update(fields_attrs)
|
||||
return attrs
|
||||
|
||||
def construct_internal_fields_attrs(self, attrs):
|
||||
asset = attrs.get('asset') or ''
|
||||
asset_display = pretty_string(str(asset), max_length=128)
|
||||
user = self.get_user(attrs)
|
||||
user_display = pretty_string(str(user), max_length=128)
|
||||
secret = attrs.get('secret') or random_string(16)
|
||||
date_expired = attrs.get('date_expired') or ConnectionToken.get_default_date_expired()
|
||||
org_id = asset.org_id
|
||||
if not isinstance(asset, Asset):
|
||||
error = ''
|
||||
raise serializers.ValidationError(error)
|
||||
attrs = {
|
||||
'user': user,
|
||||
'secret': secret,
|
||||
'user_display': user_display,
|
||||
'asset_display': asset_display,
|
||||
'date_expired': date_expired,
|
||||
'org_id': org_id,
|
||||
}
|
||||
return attrs
|
||||
|
||||
|
||||
class ConnectionTokenDisplaySerializer(ConnectionTokenSerializer):
|
||||
class Meta(ConnectionTokenSerializer.Meta):
|
||||
|
@ -86,7 +60,6 @@ class ConnectionTokenDisplaySerializer(ConnectionTokenSerializer):
|
|||
|
||||
|
||||
class SuperConnectionTokenSerializer(ConnectionTokenSerializer):
|
||||
|
||||
class Meta(ConnectionTokenSerializer.Meta):
|
||||
read_only_fields = [
|
||||
'validity', 'user_display', 'system_user_display',
|
||||
|
@ -104,6 +77,7 @@ class SuperConnectionTokenSerializer(ConnectionTokenSerializer):
|
|||
|
||||
class ConnectionTokenUserSerializer(serializers.ModelSerializer):
|
||||
""" User """
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ['id', 'name', 'username', 'email']
|
||||
|
@ -111,6 +85,7 @@ class ConnectionTokenUserSerializer(serializers.ModelSerializer):
|
|||
|
||||
class ConnectionTokenAssetSerializer(serializers.ModelSerializer):
|
||||
""" Asset """
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
fields = ['id', 'name', 'address', 'protocols', 'org_id']
|
||||
|
@ -118,18 +93,20 @@ class ConnectionTokenAssetSerializer(serializers.ModelSerializer):
|
|||
|
||||
class ConnectionTokenAccountSerializer(serializers.ModelSerializer):
|
||||
""" Account """
|
||||
|
||||
class Meta:
|
||||
model = Account
|
||||
fields = [
|
||||
'id', 'name', 'username', 'secret_type', 'secret', 'version'
|
||||
'name', 'username', 'secret_type', 'secret',
|
||||
]
|
||||
|
||||
|
||||
class ConnectionTokenGatewaySerializer(serializers.ModelSerializer):
|
||||
""" Gateway """
|
||||
|
||||
class Meta:
|
||||
model = Gateway
|
||||
fields = ['id', 'ip', 'port', 'username', 'password', 'private_key']
|
||||
model = Asset
|
||||
fields = ['id', 'address', 'port', 'username', 'password', 'private_key']
|
||||
|
||||
|
||||
class ConnectionTokenDomainSerializer(serializers.ModelSerializer):
|
||||
|
@ -143,6 +120,7 @@ class ConnectionTokenDomainSerializer(serializers.ModelSerializer):
|
|||
|
||||
class ConnectionTokenCmdFilterRuleSerializer(serializers.ModelSerializer):
|
||||
""" Command filter rule """
|
||||
|
||||
class Meta:
|
||||
model = CommandFilterRule
|
||||
fields = [
|
||||
|
@ -151,21 +129,30 @@ class ConnectionTokenCmdFilterRuleSerializer(serializers.ModelSerializer):
|
|||
]
|
||||
|
||||
|
||||
class ConnectionTokenPlatform(PlatformSerializer):
|
||||
class Meta(PlatformSerializer.Meta):
|
||||
model = Platform
|
||||
|
||||
def get_field_names(self, declared_fields, info):
|
||||
names = super().get_field_names(declared_fields, info)
|
||||
names = [n for n in names if n not in ['automation']]
|
||||
return names
|
||||
|
||||
|
||||
class ConnectionTokenSecretSerializer(OrgResourceModelSerializerMixin):
|
||||
user = ConnectionTokenUserSerializer(read_only=True)
|
||||
asset = ConnectionTokenAssetSerializer(read_only=True)
|
||||
platform = ConnectionTokenPlatform(read_only=True)
|
||||
account = ConnectionTokenAccountSerializer(read_only=True)
|
||||
gateway = ConnectionTokenGatewaySerializer(read_only=True)
|
||||
domain = ConnectionTokenDomainSerializer(read_only=True)
|
||||
cmd_filter_rules = ConnectionTokenCmdFilterRuleSerializer(many=True)
|
||||
actions = ActionsField()
|
||||
# cmd_filter_rules = ConnectionTokenCmdFilterRuleSerializer(many=True)
|
||||
actions = ActionChoicesField()
|
||||
expire_at = serializers.IntegerField()
|
||||
|
||||
class Meta:
|
||||
model = ConnectionToken
|
||||
fields = [
|
||||
'id', 'secret',
|
||||
'user', 'asset', 'account_username', 'account', 'protocol',
|
||||
'domain', 'gateway', 'cmd_filter_rules',
|
||||
'actions', 'expire_at',
|
||||
'id', 'secret', 'user', 'asset', 'account',
|
||||
'protocol', 'domain', 'gateway',
|
||||
'actions', 'expire_at', 'platform',
|
||||
]
|
||||
|
|
|
@ -1,27 +1,28 @@
|
|||
from urllib.parse import urlencode
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.utils import IntegrityError
|
||||
from django.http.request import HttpRequest
|
||||
from django.http.response import HttpResponseRedirect
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from urllib.parse import urlencode
|
||||
from django.views import View
|
||||
from django.conf import settings
|
||||
from django.http.request import HttpRequest
|
||||
from django.db.utils import IntegrityError
|
||||
from rest_framework.permissions import IsAuthenticated, AllowAny
|
||||
from rest_framework.exceptions import APIException
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
|
||||
from authentication import errors
|
||||
from authentication.const import ConfirmType
|
||||
from authentication.mixins import AuthMixin
|
||||
from authentication.notifications import OAuthBindMessage
|
||||
from common.mixins.views import PermissionsMixin, UserConfirmRequiredExceptionMixin
|
||||
from common.permissions import UserConfirmation
|
||||
from common.sdk.im.dingtalk import URL, DingTalk
|
||||
from common.utils import FlashMessageUtil, get_logger
|
||||
from common.utils.common import get_request_ip
|
||||
from common.utils.django import get_object_or_none, reverse
|
||||
from common.utils.random import random_string
|
||||
from users.models import User
|
||||
from users.views import UserVerifyPasswordView
|
||||
from common.utils import get_logger, FlashMessageUtil
|
||||
from common.utils.random import random_string
|
||||
from common.utils.django import reverse, get_object_or_none
|
||||
from common.sdk.im.dingtalk import URL
|
||||
from common.mixins.views import UserConfirmRequiredExceptionMixin, PermissionsMixin
|
||||
from common.permissions import UserConfirmation
|
||||
from authentication import errors
|
||||
from authentication.mixins import AuthMixin
|
||||
from authentication.const import ConfirmType
|
||||
from common.sdk.im.dingtalk import DingTalk
|
||||
from common.utils.common import get_request_ip
|
||||
from authentication.notifications import OAuthBindMessage
|
||||
|
||||
from .mixins import METAMixin
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
|
|
@ -1,26 +1,27 @@
|
|||
from urllib.parse import urlencode
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.utils import IntegrityError
|
||||
from django.http.request import HttpRequest
|
||||
from django.http.response import HttpResponseRedirect
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from urllib.parse import urlencode
|
||||
from django.views import View
|
||||
from django.conf import settings
|
||||
from django.http.request import HttpRequest
|
||||
from django.db.utils import IntegrityError
|
||||
from rest_framework.permissions import IsAuthenticated, AllowAny
|
||||
from rest_framework.exceptions import APIException
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
|
||||
from users.models import User
|
||||
from users.views import UserVerifyPasswordView
|
||||
from common.utils import get_logger, FlashMessageUtil
|
||||
from common.utils.random import random_string
|
||||
from common.utils.django import reverse, get_object_or_none
|
||||
from common.mixins.views import UserConfirmRequiredExceptionMixin, PermissionsMixin
|
||||
from common.permissions import UserConfirmation
|
||||
from common.sdk.im.feishu import FeiShu, URL
|
||||
from common.utils.common import get_request_ip
|
||||
from authentication import errors
|
||||
from authentication.const import ConfirmType
|
||||
from authentication.mixins import AuthMixin
|
||||
from authentication.notifications import OAuthBindMessage
|
||||
from common.mixins.views import PermissionsMixin, UserConfirmRequiredExceptionMixin
|
||||
from common.permissions import UserConfirmation
|
||||
from common.sdk.im.feishu import URL, FeiShu
|
||||
from common.utils import FlashMessageUtil, get_logger
|
||||
from common.utils.common import get_request_ip
|
||||
from common.utils.django import get_object_or_none, reverse
|
||||
from common.utils.random import random_string
|
||||
from users.models import User
|
||||
from users.views import UserVerifyPasswordView
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
|
|
@ -1,19 +1,32 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import json
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from django.db import models
|
||||
from django.utils.encoding import force_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import signer, crypto
|
||||
|
||||
|
||||
__all__ = [
|
||||
'JsonMixin', 'JsonDictMixin', 'JsonListMixin', 'JsonTypeMixin',
|
||||
'JsonCharField', 'JsonTextField', 'JsonListCharField', 'JsonListTextField',
|
||||
'JsonDictCharField', 'JsonDictTextField', 'EncryptCharField',
|
||||
'EncryptTextField', 'EncryptMixin', 'EncryptJsonDictTextField',
|
||||
'EncryptJsonDictCharField', 'PortField'
|
||||
"JsonMixin",
|
||||
"JsonDictMixin",
|
||||
"JsonListMixin",
|
||||
"JsonTypeMixin",
|
||||
"JsonCharField",
|
||||
"JsonTextField",
|
||||
"JsonListCharField",
|
||||
"JsonListTextField",
|
||||
"JsonDictCharField",
|
||||
"JsonDictTextField",
|
||||
"EncryptCharField",
|
||||
"EncryptTextField",
|
||||
"EncryptMixin",
|
||||
"EncryptJsonDictTextField",
|
||||
"EncryptJsonDictCharField",
|
||||
"PortField",
|
||||
"BitChoices",
|
||||
]
|
||||
|
||||
|
||||
|
@ -114,7 +127,7 @@ class EncryptMixin:
|
|||
"""
|
||||
|
||||
def decrypt_from_signer(self, value):
|
||||
return signer.unsign(value) or ''
|
||||
return signer.unsign(value) or ""
|
||||
|
||||
def from_db_value(self, value, expression, connection, context=None):
|
||||
if not value:
|
||||
|
@ -129,7 +142,7 @@ class EncryptMixin:
|
|||
|
||||
# 可能和Json mix,所以要先解密,再json
|
||||
sp = super()
|
||||
if hasattr(sp, 'from_db_value'):
|
||||
if hasattr(sp, "from_db_value"):
|
||||
plain_value = sp.from_db_value(plain_value, expression, connection, context)
|
||||
return plain_value
|
||||
|
||||
|
@ -139,7 +152,7 @@ class EncryptMixin:
|
|||
|
||||
# 先 json 再解密
|
||||
sp = super()
|
||||
if hasattr(sp, 'get_prep_value'):
|
||||
if hasattr(sp, "get_prep_value"):
|
||||
value = sp.get_prep_value(value)
|
||||
value = force_text(value)
|
||||
# 替换新的加密方式
|
||||
|
@ -153,12 +166,12 @@ class EncryptTextField(EncryptMixin, models.TextField):
|
|||
class EncryptCharField(EncryptMixin, models.CharField):
|
||||
@staticmethod
|
||||
def change_max_length(kwargs):
|
||||
kwargs.setdefault('max_length', 1024)
|
||||
max_length = kwargs.get('max_length')
|
||||
kwargs.setdefault("max_length", 1024)
|
||||
max_length = kwargs.get("max_length")
|
||||
if max_length < 129:
|
||||
max_length = 128
|
||||
max_length = max_length * 2
|
||||
kwargs['max_length'] = max_length
|
||||
kwargs["max_length"] = max_length
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.change_max_length(kwargs)
|
||||
|
@ -166,10 +179,10 @@ class EncryptCharField(EncryptMixin, models.CharField):
|
|||
|
||||
def deconstruct(self):
|
||||
name, path, args, kwargs = super().deconstruct()
|
||||
max_length = kwargs.pop('max_length')
|
||||
max_length = kwargs.pop("max_length")
|
||||
if max_length > 255:
|
||||
max_length = max_length // 2
|
||||
kwargs['max_length'] = max_length
|
||||
kwargs["max_length"] = max_length
|
||||
return name, path, args, kwargs
|
||||
|
||||
|
||||
|
@ -183,10 +196,50 @@ class EncryptJsonDictCharField(EncryptMixin, JsonDictCharField):
|
|||
|
||||
class PortField(models.IntegerField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.update({
|
||||
'blank': False,
|
||||
'null': False,
|
||||
'validators': [MinValueValidator(0), MaxValueValidator(65535)]
|
||||
})
|
||||
kwargs.update(
|
||||
{
|
||||
"blank": False,
|
||||
"null": False,
|
||||
"validators": [MinValueValidator(0), MaxValueValidator(65535)],
|
||||
}
|
||||
)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class BitChoices(models.IntegerChoices):
|
||||
@classmethod
|
||||
def branches(cls):
|
||||
return [i for i in cls]
|
||||
|
||||
@classmethod
|
||||
def is_tree(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def tree(cls):
|
||||
if not cls.is_tree():
|
||||
return []
|
||||
root = [_("All"), cls.branches()]
|
||||
return [cls.render_node(root)]
|
||||
|
||||
@classmethod
|
||||
def render_node(cls, node):
|
||||
if isinstance(node, BitChoices):
|
||||
return {
|
||||
"value": node.name,
|
||||
"label": node.label,
|
||||
}
|
||||
else:
|
||||
name, children = node
|
||||
return {
|
||||
"value": name,
|
||||
"label": name,
|
||||
"children": [cls.render_node(child) for child in children],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def all(cls):
|
||||
value = 0
|
||||
for c in cls:
|
||||
value |= c.value
|
||||
return value
|
||||
|
|
|
@ -1,17 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import six
|
||||
|
||||
from rest_framework.fields import ChoiceField
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import ChoiceField, empty
|
||||
|
||||
from common.db.fields import BitChoices
|
||||
from common.utils import decrypt_password
|
||||
|
||||
__all__ = [
|
||||
'ReadableHiddenField', 'EncryptedField', 'LabeledChoiceField',
|
||||
'ObjectRelatedField',
|
||||
"ReadableHiddenField",
|
||||
"EncryptedField",
|
||||
"LabeledChoiceField",
|
||||
"ObjectRelatedField",
|
||||
"BitChoicesField",
|
||||
"TreeChoicesMixin"
|
||||
]
|
||||
|
||||
|
||||
|
@ -20,14 +24,15 @@ __all__ = [
|
|||
|
||||
|
||||
class ReadableHiddenField(serializers.HiddenField):
|
||||
""" 可读的 HiddenField """
|
||||
"""可读的 HiddenField"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.write_only = False
|
||||
|
||||
def to_representation(self, value):
|
||||
if hasattr(value, 'id'):
|
||||
return getattr(value, 'id')
|
||||
if hasattr(value, "id"):
|
||||
return getattr(value, "id")
|
||||
return value
|
||||
|
||||
|
||||
|
@ -35,7 +40,7 @@ class EncryptedField(serializers.CharField):
|
|||
def __init__(self, write_only=None, **kwargs):
|
||||
if write_only is None:
|
||||
write_only = True
|
||||
kwargs['write_only'] = write_only
|
||||
kwargs["write_only"] = write_only
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def to_internal_value(self, value):
|
||||
|
@ -54,26 +59,26 @@ class LabeledChoiceField(ChoiceField):
|
|||
if value is None:
|
||||
return value
|
||||
return {
|
||||
'value': value,
|
||||
'label': self.choice_mapper.get(six.text_type(value), value),
|
||||
"value": value,
|
||||
"label": self.choice_mapper.get(six.text_type(value), value),
|
||||
}
|
||||
|
||||
def to_internal_value(self, data):
|
||||
if isinstance(data, dict):
|
||||
return data.get('value')
|
||||
return data.get("value")
|
||||
return super(LabeledChoiceField, self).to_internal_value(data)
|
||||
|
||||
|
||||
class ObjectRelatedField(serializers.RelatedField):
|
||||
default_error_messages = {
|
||||
'required': _('This field is required.'),
|
||||
'does_not_exist': _('Invalid pk "{pk_value}" - object does not exist.'),
|
||||
'incorrect_type': _('Incorrect type. Expected pk value, received {data_type}.'),
|
||||
"required": _("This field is required."),
|
||||
"does_not_exist": _('Invalid pk "{pk_value}" - object does not exist.'),
|
||||
"incorrect_type": _("Incorrect type. Expected pk value, received {data_type}."),
|
||||
}
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.attrs = kwargs.pop('attrs', None) or ('id', 'name')
|
||||
self.many = kwargs.get('many', False)
|
||||
self.attrs = kwargs.pop("attrs", None) or ("id", "name")
|
||||
self.many = kwargs.get("many", False)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def to_representation(self, value):
|
||||
|
@ -86,13 +91,79 @@ class ObjectRelatedField(serializers.RelatedField):
|
|||
if not isinstance(data, dict):
|
||||
pk = data
|
||||
else:
|
||||
pk = data.get('id') or data.get('pk') or data.get(self.attrs[0])
|
||||
pk = data.get("id") or data.get("pk") or data.get(self.attrs[0])
|
||||
queryset = self.get_queryset()
|
||||
try:
|
||||
if isinstance(data, bool):
|
||||
raise TypeError
|
||||
return queryset.get(pk=pk)
|
||||
except ObjectDoesNotExist:
|
||||
self.fail('does_not_exist', pk_value=pk)
|
||||
self.fail("does_not_exist", pk_value=pk)
|
||||
except (TypeError, ValueError):
|
||||
self.fail('incorrect_type', data_type=type(pk).__name__)
|
||||
self.fail("incorrect_type", data_type=type(pk).__name__)
|
||||
|
||||
|
||||
class TreeChoicesMixin:
|
||||
tree = []
|
||||
|
||||
|
||||
class BitChoicesField(TreeChoicesMixin, serializers.MultipleChoiceField):
|
||||
"""
|
||||
位字段
|
||||
"""
|
||||
|
||||
def __init__(self, choice_cls, **kwargs):
|
||||
assert issubclass(choice_cls, BitChoices)
|
||||
choices = [(c.name, c.label) for c in choice_cls]
|
||||
self.tree = choice_cls.tree()
|
||||
self._choice_cls = choice_cls
|
||||
super().__init__(choices=choices, **kwargs)
|
||||
|
||||
def to_representation(self, value):
|
||||
if isinstance(value, list) and len(value) == 1:
|
||||
# Swagger 会使用 field.choices.keys() 迭代传递进来
|
||||
return [
|
||||
{"value": c.name, "label": c.label}
|
||||
for c in self._choice_cls
|
||||
if c.name == value[0]
|
||||
]
|
||||
return [
|
||||
{"value": c.name, "label": c.label}
|
||||
for c in self._choice_cls
|
||||
if c.value & value == c.value
|
||||
]
|
||||
|
||||
def to_internal_value(self, data):
|
||||
if not isinstance(data, list):
|
||||
raise serializers.ValidationError(_("Invalid data type, should be list"))
|
||||
value = 0
|
||||
if not data:
|
||||
return value
|
||||
if isinstance(data[0], dict):
|
||||
data = [d["value"] for d in data]
|
||||
# 所有的
|
||||
if "all" in data:
|
||||
for c in self._choice_cls:
|
||||
value |= c.value
|
||||
return value
|
||||
|
||||
name_value_map = {c.name: c.value for c in self._choice_cls}
|
||||
for name in data:
|
||||
if name not in name_value_map:
|
||||
raise serializers.ValidationError(_("Invalid choice: {}").format(name))
|
||||
value |= name_value_map[name]
|
||||
return value
|
||||
|
||||
def run_validation(self, data=empty):
|
||||
"""
|
||||
备注:
|
||||
创建授权规则不包含 actions 字段时, 会使用默认值(AssetPermission 中设置),
|
||||
会直接使用 ['connect', '...'] 等字段保存到数据库,导致类型错误
|
||||
这里将获取到的值再执行一下 to_internal_value 方法, 转化为内部值
|
||||
"""
|
||||
data = super().run_validation(data)
|
||||
if isinstance(data, int):
|
||||
return data
|
||||
value = self.to_internal_value(data)
|
||||
self.run_validators(value)
|
||||
return value
|
||||
|
|
|
@ -2,28 +2,33 @@
|
|||
#
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from collections import OrderedDict
|
||||
import datetime
|
||||
from itertools import chain
|
||||
from collections import OrderedDict
|
||||
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.http import Http404
|
||||
from django.utils.encoding import force_text
|
||||
from rest_framework.fields import empty
|
||||
|
||||
from rest_framework.metadata import SimpleMetadata
|
||||
from rest_framework import exceptions, serializers
|
||||
from rest_framework.fields import empty
|
||||
from rest_framework.metadata import SimpleMetadata
|
||||
from rest_framework.request import clone_request
|
||||
|
||||
from common.drf.fields import TreeChoicesMixin
|
||||
|
||||
|
||||
class SimpleMetadataWithFilters(SimpleMetadata):
|
||||
"""Override SimpleMetadata, adding info about filters"""
|
||||
|
||||
methods = {"PUT", "POST", "GET", "PATCH"}
|
||||
attrs = [
|
||||
'read_only', 'label', 'help_text',
|
||||
'min_length', 'max_length',
|
||||
'min_value', 'max_value', "write_only",
|
||||
"read_only",
|
||||
"label",
|
||||
"help_text",
|
||||
"min_length",
|
||||
"max_length",
|
||||
"min_value",
|
||||
"max_value",
|
||||
"write_only",
|
||||
]
|
||||
|
||||
def determine_actions(self, request, view):
|
||||
|
@ -32,18 +37,18 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
|||
the fields that are accepted for 'PUT' and 'POST' methods.
|
||||
"""
|
||||
actions = {}
|
||||
view.raw_action = getattr(view, 'action', None)
|
||||
view.raw_action = getattr(view, "action", None)
|
||||
for method in self.methods & set(view.allowed_methods):
|
||||
if hasattr(view, 'action_map'):
|
||||
if hasattr(view, "action_map"):
|
||||
view.action = view.action_map.get(method.lower(), view.action)
|
||||
|
||||
view.request = clone_request(request, method)
|
||||
try:
|
||||
# Test global permissions
|
||||
if hasattr(view, 'check_permissions'):
|
||||
if hasattr(view, "check_permissions"):
|
||||
view.check_permissions(view.request)
|
||||
# Test object permissions
|
||||
if method == 'PUT' and hasattr(view, 'get_object'):
|
||||
if method == "PUT" and hasattr(view, "get_object"):
|
||||
view.get_object()
|
||||
except (exceptions.APIException, PermissionDenied, Http404):
|
||||
pass
|
||||
|
@ -56,70 +61,86 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
|||
view.request = request
|
||||
return actions
|
||||
|
||||
def get_field_type(self, field):
|
||||
"""
|
||||
Given a field, return a string representing the type of the field.
|
||||
"""
|
||||
tp = self.label_lookup[field]
|
||||
|
||||
class_name = field.__class__.__name__
|
||||
if class_name == "LabeledChoiceField":
|
||||
tp = "labeled_choice"
|
||||
elif class_name == "ObjectRelatedField":
|
||||
tp = "object_related_field"
|
||||
elif class_name == "ManyRelatedField":
|
||||
child_relation_class_name = field.child_relation.__class__.__name__
|
||||
if child_relation_class_name == "ObjectRelatedField":
|
||||
tp = "m2m_related_field"
|
||||
return tp
|
||||
|
||||
@staticmethod
|
||||
def set_choices_field(field, field_info):
|
||||
field_info["choices"] = [
|
||||
{
|
||||
"value": choice_value,
|
||||
"label": force_text(choice_label, strings_only=True),
|
||||
}
|
||||
for choice_value, choice_label in dict(field.choices).items()
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def set_tree_field(field, field_info):
|
||||
field_info["tree"] = field.tree
|
||||
field_info["type"] = "tree"
|
||||
|
||||
def get_field_info(self, field):
|
||||
"""
|
||||
Given an instance of a serializer field, return a dictionary
|
||||
of metadata about it.
|
||||
"""
|
||||
field_info = OrderedDict()
|
||||
field_info['type'] = self.label_lookup[field]
|
||||
field_info['required'] = getattr(field, 'required', False)
|
||||
field_info["type"] = self.get_field_type(field)
|
||||
field_info["required"] = getattr(field, "required", False)
|
||||
|
||||
default = getattr(field, 'default', None)
|
||||
# Default value
|
||||
default = getattr(field, "default", None)
|
||||
if default is not None and default != empty:
|
||||
if isinstance(default, (str, int, bool, float, datetime.datetime, list)):
|
||||
field_info['default'] = default
|
||||
field_info["default"] = default
|
||||
|
||||
for attr in self.attrs:
|
||||
value = getattr(field, attr, None)
|
||||
if value is not None and value != '':
|
||||
if value is not None and value != "":
|
||||
field_info[attr] = force_text(value, strings_only=True)
|
||||
|
||||
if getattr(field, 'child', None):
|
||||
field_info['child'] = self.get_field_info(field.child)
|
||||
elif getattr(field, 'fields', None):
|
||||
field_info['children'] = self.get_serializer_info(field)
|
||||
|
||||
is_related_field = isinstance(field, (serializers.RelatedField, serializers.ManyRelatedField))
|
||||
if not is_related_field and hasattr(field, 'choices'):
|
||||
field_info['choices'] = [
|
||||
{
|
||||
'value': choice_value,
|
||||
'label': force_text(choice_name, strings_only=True)
|
||||
}
|
||||
for choice_value, choice_name in dict(field.choices).items()
|
||||
]
|
||||
|
||||
class_name = field.__class__.__name__
|
||||
if class_name == 'LabeledChoiceField':
|
||||
field_info['type'] = 'labeled_choice'
|
||||
elif class_name == 'ObjectRelatedField':
|
||||
field_info['type'] = 'object_related_field'
|
||||
elif class_name == 'ManyRelatedField':
|
||||
child_relation_class_name = field.child_relation.__class__.__name__
|
||||
if child_relation_class_name == 'ObjectRelatedField':
|
||||
field_info['type'] = 'm2m_related_field'
|
||||
|
||||
# if field.label == '系统平台':
|
||||
# print("Field: ", class_name, field, field_info)
|
||||
if getattr(field, "child", None):
|
||||
field_info["child"] = self.get_field_info(field.child)
|
||||
elif getattr(field, "fields", None):
|
||||
field_info["children"] = self.get_serializer_info(field)
|
||||
|
||||
if isinstance(field, TreeChoicesMixin):
|
||||
self.set_tree_field(field, field_info)
|
||||
elif isinstance(field, serializers.ChoiceField):
|
||||
self.set_choices_field(field, field_info)
|
||||
return field_info
|
||||
|
||||
def get_filters_fields(self, request, view):
|
||||
@staticmethod
|
||||
def get_filters_fields(request, view):
|
||||
fields = []
|
||||
if hasattr(view, 'get_filter_fields'):
|
||||
if hasattr(view, "get_filter_fields"):
|
||||
fields = view.get_filter_fields(request)
|
||||
elif hasattr(view, 'filter_fields'):
|
||||
elif hasattr(view, "filter_fields"):
|
||||
fields = view.filter_fields
|
||||
elif hasattr(view, 'filterset_fields'):
|
||||
elif hasattr(view, "filterset_fields"):
|
||||
fields = view.filterset_fields
|
||||
elif hasattr(view, 'get_filterset_fields'):
|
||||
elif hasattr(view, "get_filterset_fields"):
|
||||
fields = view.get_filterset_fields(request)
|
||||
elif hasattr(view, 'filterset_class'):
|
||||
fields = list(view.filterset_class.Meta.fields) + \
|
||||
list(view.filterset_class.declared_filters.keys())
|
||||
elif hasattr(view, "filterset_class"):
|
||||
fields = list(view.filterset_class.Meta.fields) + list(
|
||||
view.filterset_class.declared_filters.keys()
|
||||
)
|
||||
|
||||
if hasattr(view, 'custom_filter_fields'):
|
||||
if hasattr(view, "custom_filter_fields"):
|
||||
# 不能写 fields += view.custom_filter_fields
|
||||
# 会改变 view 的 filter_fields
|
||||
fields = list(fields) + list(view.custom_filter_fields)
|
||||
|
@ -128,16 +149,19 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
|||
fields = list(fields.keys())
|
||||
return fields
|
||||
|
||||
def get_ordering_fields(self, request, view):
|
||||
@staticmethod
|
||||
def get_ordering_fields(request, view):
|
||||
fields = []
|
||||
if hasattr(view, 'get_ordering_fields'):
|
||||
if hasattr(view, "get_ordering_fields"):
|
||||
fields = view.get_ordering_fields(request)
|
||||
elif hasattr(view, 'ordering_fields'):
|
||||
elif hasattr(view, "ordering_fields"):
|
||||
fields = view.ordering_fields
|
||||
return fields
|
||||
|
||||
def determine_metadata(self, request, view):
|
||||
metadata = super(SimpleMetadataWithFilters, self).determine_metadata(request, view)
|
||||
metadata = super(SimpleMetadataWithFilters, self).determine_metadata(
|
||||
request, view
|
||||
)
|
||||
filterset_fields = self.get_filters_fields(request, view)
|
||||
order_fields = self.get_ordering_fields(request, view)
|
||||
|
||||
|
|
|
@ -9,14 +9,20 @@ from rest_framework.request import Request
|
|||
from common.exceptions import UserConfirmRequired
|
||||
from audits.utils import create_operate_log
|
||||
from audits.models import OperateLog
|
||||
from audits.const import ActionChoices
|
||||
|
||||
__all__ = ["PermissionsMixin", "RecordViewLogMixin", "UserConfirmRequiredExceptionMixin"]
|
||||
__all__ = [
|
||||
"PermissionsMixin",
|
||||
"RecordViewLogMixin",
|
||||
"UserConfirmRequiredExceptionMixin",
|
||||
]
|
||||
|
||||
|
||||
class UserConfirmRequiredExceptionMixin:
|
||||
"""
|
||||
异常处理
|
||||
"""
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
try:
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
@ -40,23 +46,23 @@ class PermissionsMixin(UserPassesTestMixin):
|
|||
|
||||
|
||||
class RecordViewLogMixin:
|
||||
ACTION = OperateLog.ACTION_VIEW
|
||||
ACTION = ActionChoices.view
|
||||
|
||||
@staticmethod
|
||||
def get_resource_display(request):
|
||||
query_params = dict(request.query_params)
|
||||
if query_params.get('format'):
|
||||
query_params.pop('format')
|
||||
spm_filter = query_params.pop('spm') if query_params.get('spm') else None
|
||||
if query_params.get("format"):
|
||||
query_params.pop("format")
|
||||
spm_filter = query_params.pop("spm") if query_params.get("spm") else None
|
||||
if not query_params and not spm_filter:
|
||||
display_message = _('Export all')
|
||||
display_message = _("Export all")
|
||||
elif spm_filter:
|
||||
display_message = _('Export only selected items')
|
||||
display_message = _("Export only selected items")
|
||||
else:
|
||||
query = ','.join(
|
||||
['%s=%s' % (key, value) for key, value in query_params.items()]
|
||||
query = ",".join(
|
||||
["%s=%s" % (key, value) for key, value in query_params.items()]
|
||||
)
|
||||
display_message = _('Export filtered: %s') % query
|
||||
display_message = _("Export filtered: %s") % query
|
||||
return display_message
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import os
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.core.mail import send_mail, EmailMultiAlternatives
|
||||
from django.conf import settings
|
||||
from celery import shared_task
|
||||
|
@ -9,7 +10,7 @@ from .utils import get_logger
|
|||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_("Send email"))
|
||||
def send_mail_async(*args, **kwargs):
|
||||
""" Using celery to send email async
|
||||
|
||||
|
@ -36,7 +37,7 @@ def send_mail_async(*args, **kwargs):
|
|||
logger.error("Sending mail error: {}".format(e))
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_("Send email attachment"))
|
||||
def send_mail_attachment_async(subject, message, recipient_list, attachment_list=None):
|
||||
if attachment_list is None:
|
||||
attachment_list = []
|
||||
|
|
|
@ -344,7 +344,7 @@ def get_file_by_arch(dir, filename):
|
|||
return file_path
|
||||
|
||||
|
||||
def pretty_string(data: str, max_length=128, ellipsis_str='...'):
|
||||
def pretty_string(data, max_length=128, ellipsis_str='...'):
|
||||
"""
|
||||
params:
|
||||
data: abcdefgh
|
||||
|
@ -353,6 +353,7 @@ def pretty_string(data: str, max_length=128, ellipsis_str='...'):
|
|||
return:
|
||||
ab...gh
|
||||
"""
|
||||
data = str(data)
|
||||
if len(data) < max_length:
|
||||
return data
|
||||
remain_length = max_length - len(ellipsis_str)
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:860b4d38beff81667c64da41c026a7dd28c3c93a28ae61fefaa7c26875f35638
|
||||
size 73906864
|
|
@ -0,0 +1,4 @@
|
|||
def bit(x):
|
||||
if x < 1:
|
||||
raise ValueError("x must be greater than 1")
|
||||
return 2 ** (x - 1)
|
|
@ -7,23 +7,22 @@
|
|||
2. 程序需要, 用户不需要更改的写到settings中
|
||||
3. 程序需要, 用户需要更改的写到本config中
|
||||
"""
|
||||
import base64
|
||||
import copy
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import types
|
||||
import errno
|
||||
import json
|
||||
import yaml
|
||||
import copy
|
||||
import base64
|
||||
import logging
|
||||
from importlib import import_module
|
||||
from urllib.parse import urljoin, urlparse
|
||||
from gmssl.sm4 import CryptSM4, SM4_ENCRYPT, SM4_DECRYPT
|
||||
|
||||
import yaml
|
||||
from django.urls import reverse_lazy
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from gmssl.sm4 import CryptSM4, SM4_ENCRYPT, SM4_DECRYPT
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
PROJECT_DIR = os.path.dirname(BASE_DIR)
|
||||
|
@ -499,6 +498,9 @@ class Config(dict):
|
|||
|
||||
'FORGOT_PASSWORD_URL': '',
|
||||
'HEALTH_CHECK_TOKEN': '',
|
||||
|
||||
# Applet 等软件的下载地址
|
||||
'APPLET_DOWNLOAD_HOST': '',
|
||||
}
|
||||
|
||||
def __init__(self, *args):
|
||||
|
|
|
@ -6,7 +6,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
|
||||
default_interface = dict((
|
||||
('logo_logout', static('img/logo.png')),
|
||||
('logo_index', static('img/logo_text.png')),
|
||||
('logo_index', static('img/logo_text_white.png')),
|
||||
('login_image', static('img/login_image.jpg')),
|
||||
('favicon', static('img/facio.ico')),
|
||||
('login_title', _('JumpServer Open Source Bastion Host')),
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
|
||||
from django.urls import reverse_lazy
|
||||
|
||||
from .. import const
|
||||
|
@ -36,6 +37,9 @@ DEBUG_DEV = CONFIG.DEBUG_DEV
|
|||
# Absolute url for some case, for example email link
|
||||
SITE_URL = CONFIG.SITE_URL
|
||||
|
||||
# Absolute url for downloading applet
|
||||
APPLET_DOWNLOAD_HOST = CONFIG.APPLET_DOWNLOAD_HOST
|
||||
|
||||
# https://docs.djangoproject.com/en/4.1/ref/settings/
|
||||
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
||||
|
||||
|
@ -313,7 +317,6 @@ PASSWORD_HASHERS = [
|
|||
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
|
||||
]
|
||||
|
||||
|
||||
GMSSL_ENABLED = CONFIG.GMSSL_ENABLED
|
||||
GM_HASHER = 'common.hashers.PBKDF2SM3PasswordHasher'
|
||||
if GMSSL_ENABLED:
|
||||
|
@ -329,4 +332,3 @@ if os.environ.get('DEBUG_TOOLBAR', False):
|
|||
DEBUG_TOOLBAR_PANELS = [
|
||||
'debug_toolbar.panels.profiling.ProfilingPanel',
|
||||
]
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:07f1cfd07039142f4847b4139586bf815467f266119eae57476c073130f0ac92
|
||||
size 118098
|
||||
oid sha256:adfa9c01178d5f6490e616f62d41c71974d42f9e3bd078fcf1b3c7124384df0b
|
||||
size 117024
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,3 +1,3 @@
|
|||
version https://git-lfs.github.com/spec/v1
|
||||
oid sha256:314c29cb8b10aaddbb030bf49af293be23f0153ff1f1c7562946879574ce6de8
|
||||
size 102801
|
||||
oid sha256:eeaa813f4ea052a1cd85b8ae5addfde6b088fd21a0261f8724d62823835512a2
|
||||
size 104043
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -16,7 +16,6 @@ from .models import SystemMsgSubscription, UserMsgSubscription
|
|||
|
||||
__all__ = ('SystemMessage', 'UserMessage', 'system_msgs', 'Message')
|
||||
|
||||
|
||||
system_msgs = []
|
||||
user_msgs = []
|
||||
|
||||
|
@ -44,7 +43,7 @@ class MessageType(type):
|
|||
return clz
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Publish the station message'))
|
||||
def publish_task(msg):
|
||||
msg.publish()
|
||||
|
||||
|
|
|
@ -5,6 +5,7 @@ class DefaultCallback:
|
|||
STATUS_MAPPER = {
|
||||
'successful': 'success',
|
||||
'failure': 'failed',
|
||||
'failed': 'failed',
|
||||
'running': 'running',
|
||||
'pending': 'pending',
|
||||
'unknown': 'unknown'
|
||||
|
|
|
@ -13,7 +13,7 @@ class AdHocRunner:
|
|||
"reboot", 'shutdown', 'poweroff', 'halt', 'dd', 'half', 'top'
|
||||
]
|
||||
|
||||
def __init__(self, inventory, module, module_args='', pattern='*', project_dir='/tmp/'):
|
||||
def __init__(self, inventory, module, module_args='', pattern='*', project_dir='/tmp/', extra_vars={}):
|
||||
self.id = uuid.uuid4()
|
||||
self.inventory = inventory
|
||||
self.pattern = pattern
|
||||
|
@ -22,6 +22,7 @@ class AdHocRunner:
|
|||
self.project_dir = project_dir
|
||||
self.cb = DefaultCallback()
|
||||
self.runner = None
|
||||
self.extra_vars = extra_vars
|
||||
|
||||
def check_module(self):
|
||||
if self.module not in self.cmd_modules_choices:
|
||||
|
@ -38,6 +39,7 @@ class AdHocRunner:
|
|||
os.mkdir(self.project_dir, 0o755)
|
||||
|
||||
ansible_runner.run(
|
||||
extravars=self.extra_vars,
|
||||
host_pattern=self.pattern,
|
||||
private_data_dir=self.project_dir,
|
||||
inventory=self.inventory,
|
||||
|
|
|
@ -3,4 +3,4 @@ from django.conf import settings
|
|||
|
||||
def get_ansible_task_log_path(task_id):
|
||||
from ops.utils import get_task_log_path
|
||||
return get_task_log_path(settings.ANSIBLE_LOG_DIR, task_id, level=3)
|
||||
return get_task_log_path(settings.CELERY_LOG_DIR, task_id, level=2)
|
||||
|
|
|
@ -2,3 +2,5 @@
|
|||
#
|
||||
from .adhoc import *
|
||||
from .celery import *
|
||||
from .job import *
|
||||
from .playbook import *
|
||||
|
|
|
@ -1,52 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework import viewsets, generics
|
||||
from rest_framework.views import Response
|
||||
|
||||
from common.drf.serializers import CeleryTaskExecutionSerializer
|
||||
from ..models import AdHoc, AdHocExecution
|
||||
from rest_framework import viewsets
|
||||
from ..models import AdHoc
|
||||
from ..serializers import (
|
||||
AdHocSerializer,
|
||||
AdHocExecutionSerializer,
|
||||
AdHocDetailSerializer,
|
||||
AdHocSerializer
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'AdHocViewSet', 'AdHocExecutionViewSet'
|
||||
'AdHocViewSet'
|
||||
]
|
||||
|
||||
|
||||
class AdHocViewSet(viewsets.ModelViewSet):
|
||||
queryset = AdHoc.objects.all()
|
||||
serializer_class = AdHocSerializer
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.action == 'retrieve':
|
||||
return AdHocDetailSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
|
||||
class AdHocExecutionViewSet(viewsets.ModelViewSet):
|
||||
queryset = AdHocExecution.objects.all()
|
||||
serializer_class = AdHocExecutionSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
task_id = self.request.query_params.get('task')
|
||||
adhoc_id = self.request.query_params.get('adhoc')
|
||||
|
||||
if task_id:
|
||||
task = get_object_or_404(AdHoc, id=task_id)
|
||||
adhocs = task.adhoc.all()
|
||||
self.queryset = self.queryset.filter(adhoc__in=adhocs)
|
||||
|
||||
if adhoc_id:
|
||||
adhoc = get_object_or_404(AdHoc, id=adhoc_id)
|
||||
self.queryset = self.queryset.filter(adhoc=adhoc)
|
||||
return self.queryset
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -98,20 +98,27 @@ class CeleryPeriodTaskViewSet(CommonApiMixin, viewsets.ModelViewSet):
|
|||
return queryset
|
||||
|
||||
|
||||
class CelerySummaryAPIView(generics.RetrieveAPIView):
|
||||
def get(self, request, *args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
class CeleryTaskViewSet(CommonApiMixin, viewsets.ReadOnlyModelViewSet):
|
||||
queryset = CeleryTask.objects.all()
|
||||
serializer_class = CeleryTaskSerializer
|
||||
http_method_names = ('get', 'head', 'options',)
|
||||
|
||||
def get_queryset(self):
|
||||
return CeleryTask.objects.exclude(name__startswith='celery')
|
||||
|
||||
|
||||
class CeleryTaskExecutionViewSet(CommonApiMixin, viewsets.ReadOnlyModelViewSet):
|
||||
serializer_class = CeleryTaskExecutionSerializer
|
||||
http_method_names = ('get', 'head', 'options',)
|
||||
queryset = CeleryTaskExecution.objects.all()
|
||||
|
||||
def get_queryset(self):
|
||||
task_id = self.kwargs.get("task_pk")
|
||||
task_id = self.request.query_params.get('task_id')
|
||||
if task_id:
|
||||
task = CeleryTask.objects.get(pk=task_id)
|
||||
return CeleryTaskExecution.objects.filter(name=task.name)
|
||||
else:
|
||||
return CeleryTaskExecution.objects.none()
|
||||
task = get_object_or_404(CeleryTask, id=task_id)
|
||||
self.queryset = self.queryset.filter(name=task.name)
|
||||
return self.queryset
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
from rest_framework import viewsets
|
||||
|
||||
from ops.models import Job, JobExecution
|
||||
from ops.serializers.job import JobSerializer, JobExecutionSerializer
|
||||
|
||||
__all__ = ['JobViewSet', 'JobExecutionViewSet']
|
||||
|
||||
from ops.tasks import run_ops_job, run_ops_job_executions
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
|
||||
|
||||
class JobViewSet(OrgBulkModelViewSet):
|
||||
serializer_class = JobSerializer
|
||||
model = Job
|
||||
permission_classes = ()
|
||||
|
||||
def get_queryset(self):
|
||||
query_set = super().get_queryset()
|
||||
return query_set.filter(instant=False)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
instance = serializer.save()
|
||||
if instance.instant:
|
||||
run_ops_job.delay(instance.id)
|
||||
|
||||
|
||||
class JobExecutionViewSet(OrgBulkModelViewSet):
|
||||
serializer_class = JobExecutionSerializer
|
||||
http_method_names = ('get', 'post', 'head', 'options',)
|
||||
# filter_fields = ('type',)
|
||||
permission_classes = ()
|
||||
model = JobExecution
|
||||
|
||||
def perform_create(self, serializer):
|
||||
instance = serializer.save()
|
||||
run_ops_job_executions.delay(instance.id)
|
||||
|
||||
def get_queryset(self):
|
||||
query_set = super().get_queryset()
|
||||
job_id = self.request.query_params.get('job_id')
|
||||
if job_id:
|
||||
self.queryset = query_set.filter(job_id=job_id)
|
||||
return query_set
|
|
@ -0,0 +1,28 @@
|
|||
import os
|
||||
import zipfile
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework import viewsets
|
||||
from ..models import Playbook
|
||||
from ..serializers.playbook import PlaybookSerializer
|
||||
|
||||
__all__ = ["PlaybookViewSet"]
|
||||
|
||||
|
||||
def unzip_playbook(src, dist):
|
||||
fz = zipfile.ZipFile(src, 'r')
|
||||
for file in fz.namelist():
|
||||
fz.extract(file, dist)
|
||||
|
||||
|
||||
class PlaybookViewSet(viewsets.ModelViewSet):
|
||||
queryset = Playbook.objects.all()
|
||||
serializer_class = PlaybookSerializer
|
||||
|
||||
def perform_create(self, serializer):
|
||||
instance = serializer.save()
|
||||
src_path = os.path.join(settings.MEDIA_ROOT, instance.path.name)
|
||||
dest_path = os.path.join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
|
||||
if os.path.exists(dest_path):
|
||||
os.makedirs(dest_path)
|
||||
unzip_playbook(src_path, dest_path)
|
|
@ -0,0 +1,171 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-11 11:19
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('assets', '0111_alter_automationexecution_status'),
|
||||
('ops', '0028_celerytask_last_published_time'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Job',
|
||||
fields=[
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=128, null=True, verbose_name='Name')),
|
||||
('instant', models.BooleanField(default=False)),
|
||||
('args', models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Args')),
|
||||
('module', models.CharField(choices=[('shell', 'Shell'), ('win_shell', 'Powershell')], default='shell', max_length=128, null=True, verbose_name='Module')),
|
||||
('type', models.CharField(choices=[('adhoc', 'Adhoc'), ('playbook', 'Playbook')], default='adhoc', max_length=128, verbose_name='Type')),
|
||||
('runas', models.CharField(default='root', max_length=128, verbose_name='Runas')),
|
||||
('runas_policy', models.CharField(choices=[('privileged_only', 'Privileged Only'), ('privileged_first', 'Privileged First'), ('skip', 'Skip')], default='skip', max_length=128, verbose_name='Runas policy')),
|
||||
('assets', models.ManyToManyField(to='assets.Asset', verbose_name='Assets')),
|
||||
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Creator')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='JobExecution',
|
||||
fields=[
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
('updated_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Updated by')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('task_id', models.UUIDField(null=True)),
|
||||
('status', models.CharField(default='running', max_length=16, verbose_name='Status')),
|
||||
('result', models.JSONField(blank=True, null=True, verbose_name='Result')),
|
||||
('summary', models.JSONField(default=dict, verbose_name='Summary')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
|
||||
('date_start', models.DateTimeField(db_index=True, null=True, verbose_name='Date start')),
|
||||
('date_finished', models.DateTimeField(null=True, verbose_name='Date finished')),
|
||||
('creator', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Creator')),
|
||||
('job', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='ops.job')),
|
||||
],
|
||||
options={
|
||||
'abstract': False,
|
||||
},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='playbooktemplate',
|
||||
unique_together=None,
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='account',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='account_policy',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='assets',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='crontab',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='date_last_run',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='interval',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='is_periodic',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='last_execution',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='adhoc',
|
||||
name='org_id',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='account',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='account_policy',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='assets',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='comment',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='crontab',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='date_last_run',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='interval',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='is_periodic',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='last_execution',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='org_id',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='playbook',
|
||||
name='template',
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='adhoc',
|
||||
name='module',
|
||||
field=models.CharField(choices=[('shell', 'Shell'), ('win_shell', 'Powershell')], default='shell', max_length=128, verbose_name='Module'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='playbook',
|
||||
name='name',
|
||||
field=models.CharField(max_length=128, null=True, verbose_name='Name'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='playbook',
|
||||
name='path',
|
||||
field=models.FileField(upload_to='playbooks/'),
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='PlaybookExecution',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name='PlaybookTemplate',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='playbook',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='ops.playbook', verbose_name='Playbook'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,42 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-16 10:11
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ops', '0029_auto_20221111_1919'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='celerytask',
|
||||
options={'ordering': ('name',)},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='variables',
|
||||
field=models.JSONField(default=dict, verbose_name='Variables'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='celerytask',
|
||||
name='name',
|
||||
field=models.CharField(max_length=1024, verbose_name='Name'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='celerytaskexecution',
|
||||
name='date_finished',
|
||||
field=models.DateTimeField(null=True, verbose_name='Date finished'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='celerytaskexecution',
|
||||
name='date_published',
|
||||
field=models.DateTimeField(auto_now_add=True, verbose_name='Date published'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='celerytaskexecution',
|
||||
name='date_start',
|
||||
field=models.DateTimeField(null=True, verbose_name='Date start'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-16 12:24
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ops', '0030_auto_20221116_1811'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='chdir',
|
||||
field=models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Chdir'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='comment',
|
||||
field=models.CharField(blank=True, default='', max_length=1024, null=True, verbose_name='Comment'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='timeout',
|
||||
field=models.IntegerField(default=60, verbose_name='Timeout (Seconds)'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,27 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-17 10:48
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ops', '0031_auto_20221116_2024'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='job',
|
||||
name='variables',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='parameters_define',
|
||||
field=models.JSONField(default=dict, verbose_name='Parameters define'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobexecution',
|
||||
name='parameters',
|
||||
field=models.JSONField(default=dict, verbose_name='Parameters'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,28 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-18 06:31
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ops', '0032_auto_20221117_1848'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='crontab',
|
||||
field=models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='interval',
|
||||
field=models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='is_periodic',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-23 09:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ops', '0033_auto_20221118_1431'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='job',
|
||||
name='org_id',
|
||||
field=models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization'),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.14 on 2022-11-23 10:22
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('ops', '0034_job_org_id'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='jobexecution',
|
||||
name='org_id',
|
||||
field=models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization'),
|
||||
),
|
||||
]
|
|
@ -4,3 +4,4 @@
|
|||
from .adhoc import *
|
||||
from .celery import *
|
||||
from .playbook import *
|
||||
from .job import *
|
||||
|
|
|
@ -1,29 +1,43 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
import os.path
|
||||
import uuid
|
||||
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.db.models import BaseCreateUpdateModel
|
||||
from common.utils import get_logger
|
||||
from .base import BaseAnsibleJob, BaseAnsibleExecution
|
||||
from ..ansible import AdHocRunner
|
||||
|
||||
__all__ = ["AdHoc", "AdHocExecution"]
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class AdHoc(BaseAnsibleJob):
|
||||
pattern = models.CharField(max_length=1024, verbose_name=_("Pattern"), default='all')
|
||||
module = models.CharField(max_length=128, default='shell', verbose_name=_('Module'))
|
||||
args = models.CharField(max_length=1024, default='', verbose_name=_('Args'))
|
||||
last_execution = models.ForeignKey('AdHocExecution', verbose_name=_("Last execution"),
|
||||
on_delete=models.SET_NULL, null=True, blank=True)
|
||||
class AdHoc(BaseCreateUpdateModel):
|
||||
class Modules(models.TextChoices):
|
||||
shell = 'shell', _('Shell')
|
||||
winshell = 'win_shell', _('Powershell')
|
||||
|
||||
def get_register_task(self):
|
||||
from ops.tasks import run_adhoc
|
||||
return "run_adhoc_{}".format(self.id), run_adhoc, (str(self.id),), {}
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
pattern = models.CharField(max_length=1024, verbose_name=_("Pattern"), default='all')
|
||||
module = models.CharField(max_length=128, choices=Modules.choices, default=Modules.shell,
|
||||
verbose_name=_('Module'))
|
||||
args = models.CharField(max_length=1024, default='', verbose_name=_('Args'))
|
||||
owner = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
|
||||
|
||||
@property
|
||||
def row_count(self):
|
||||
if len(self.args) == 0:
|
||||
return 0
|
||||
count = str(self.args).count('\n')
|
||||
return count + 1
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
return len(self.args)
|
||||
|
||||
def __str__(self):
|
||||
return "{}: {}".format(self.module, self.args)
|
||||
|
|
|
@ -17,7 +17,8 @@ class BaseAnsibleJob(PeriodTaskModelMixin, JMSOrgBaseModel):
|
|||
assets = models.ManyToManyField('assets.Asset', verbose_name=_("Assets"))
|
||||
account = models.CharField(max_length=128, default='root', verbose_name=_('Account'))
|
||||
account_policy = models.CharField(max_length=128, default='root', verbose_name=_('Account policy'))
|
||||
last_execution = models.ForeignKey('BaseAnsibleExecution', verbose_name=_("Last execution"), on_delete=models.SET_NULL, null=True)
|
||||
last_execution = models.ForeignKey('BaseAnsibleExecution', verbose_name=_("Last execution"),
|
||||
on_delete=models.SET_NULL, null=True)
|
||||
date_last_run = models.DateTimeField(null=True, verbose_name=_('Date last run'))
|
||||
|
||||
class Meta:
|
||||
|
@ -118,12 +119,6 @@ class BaseAnsibleExecution(models.Model):
|
|||
def is_success(self):
|
||||
return self.status == 'success'
|
||||
|
||||
@property
|
||||
def time_cost(self):
|
||||
if self.date_finished and self.date_start:
|
||||
return (self.date_finished - self.date_start).total_seconds()
|
||||
return None
|
||||
|
||||
@property
|
||||
def short_id(self):
|
||||
return str(self.id).split('-')[-1]
|
||||
|
@ -134,4 +129,8 @@ class BaseAnsibleExecution(models.Model):
|
|||
return self.date_finished - self.date_start
|
||||
return None
|
||||
|
||||
|
||||
@property
|
||||
def time_cost(self):
|
||||
if self.date_finished and self.date_start:
|
||||
return (self.date_finished - self.date_start).total_seconds()
|
||||
return None
|
||||
|
|
|
@ -12,18 +12,24 @@ from ops.celery import app
|
|||
|
||||
class CeleryTask(models.Model):
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
|
||||
name = models.CharField(max_length=1024)
|
||||
name = models.CharField(max_length=1024, verbose_name=_('Name'))
|
||||
last_published_time = models.DateTimeField(null=True)
|
||||
|
||||
@property
|
||||
def meta(self):
|
||||
task = app.tasks.get(self.name, None)
|
||||
return {
|
||||
"verbose_name": getattr(task, 'verbose_name', None),
|
||||
"comment": getattr(task, 'comment', None),
|
||||
"comment": getattr(task, 'verbose_name', None),
|
||||
"queue": getattr(task, 'queue', 'default')
|
||||
}
|
||||
|
||||
@property
|
||||
def summary(self):
|
||||
executions = CeleryTaskExecution.objects.filter(name=self.name)
|
||||
total = executions.count()
|
||||
success = executions.filter(state='SUCCESS').count()
|
||||
return {'total': total, 'success': success}
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
last_five_executions = CeleryTaskExecution.objects.filter(name=self.name).order_by('-date_published')[:5]
|
||||
|
@ -37,6 +43,9 @@ class CeleryTask(models.Model):
|
|||
return "yellow"
|
||||
return "green"
|
||||
|
||||
class Meta:
|
||||
ordering = ('name',)
|
||||
|
||||
|
||||
class CeleryTaskExecution(models.Model):
|
||||
LOG_DIR = os.path.join(settings.PROJECT_DIR, 'data', 'celery')
|
||||
|
@ -46,9 +55,21 @@ class CeleryTaskExecution(models.Model):
|
|||
kwargs = models.JSONField(verbose_name=_("Kwargs"))
|
||||
state = models.CharField(max_length=16, verbose_name=_("State"))
|
||||
is_finished = models.BooleanField(default=False, verbose_name=_("Finished"))
|
||||
date_published = models.DateTimeField(auto_now_add=True)
|
||||
date_start = models.DateTimeField(null=True)
|
||||
date_finished = models.DateTimeField(null=True)
|
||||
date_published = models.DateTimeField(auto_now_add=True, verbose_name=_('Date published'))
|
||||
date_start = models.DateTimeField(null=True, verbose_name=_('Date start'))
|
||||
date_finished = models.DateTimeField(null=True, verbose_name=_('Date finished'))
|
||||
|
||||
@property
|
||||
def time_cost(self):
|
||||
if self.date_finished and self.date_start:
|
||||
return (self.date_finished - self.date_start).total_seconds()
|
||||
return None
|
||||
|
||||
@property
|
||||
def timedelta(self):
|
||||
if self.date_start and self.date_finished:
|
||||
return self.date_finished - self.date_start
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return "{}: {}".format(self.name, self.id)
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
# 内置环境变量
|
||||
BUILTIN_VARIABLES = {
|
||||
|
||||
}
|
|
@ -0,0 +1,200 @@
|
|||
import json
|
||||
import os
|
||||
import uuid
|
||||
import logging
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils import timezone
|
||||
from celery import current_task
|
||||
|
||||
__all__ = ["Job", "JobExecution"]
|
||||
|
||||
from ops.ansible import JMSInventory, AdHocRunner, PlaybookRunner
|
||||
from ops.mixin import PeriodTaskModelMixin
|
||||
from orgs.mixins.models import JMSOrgBaseModel
|
||||
|
||||
|
||||
class Job(JMSOrgBaseModel, PeriodTaskModelMixin):
    """Definition of an ansible job: either an ad-hoc module invocation or a
    playbook run, executed against a set of assets. Each run is recorded as a
    related :class:`JobExecution` (``executions`` reverse relation).
    """

    class Types(models.TextChoices):
        adhoc = 'adhoc', _('Adhoc')
        playbook = 'playbook', _('Playbook')

    class RunasPolicies(models.TextChoices):
        privileged_only = 'privileged_only', _('Privileged Only')
        privileged_first = 'privileged_first', _('Privileged First')
        skip = 'skip', _('Skip')

    class Modules(models.TextChoices):
        shell = 'shell', _('Shell')
        winshell = 'win_shell', _('Powershell')

    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    name = models.CharField(max_length=128, null=True, verbose_name=_('Name'))
    instant = models.BooleanField(default=False)
    args = models.CharField(max_length=1024, default='', verbose_name=_('Args'), null=True, blank=True)
    module = models.CharField(max_length=128, choices=Modules.choices, default=Modules.shell,
                              verbose_name=_('Module'), null=True)
    chdir = models.CharField(default="", max_length=1024, verbose_name=_('Chdir'), null=True, blank=True)
    timeout = models.IntegerField(default=60, verbose_name=_('Timeout (Seconds)'))
    playbook = models.ForeignKey('ops.Playbook', verbose_name=_("Playbook"), null=True, on_delete=models.SET_NULL)
    type = models.CharField(max_length=128, choices=Types.choices, default=Types.adhoc, verbose_name=_("Type"))
    owner = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    assets = models.ManyToManyField('assets.Asset', verbose_name=_("Assets"))
    runas = models.CharField(max_length=128, default='root', verbose_name=_('Runas'))
    runas_policy = models.CharField(max_length=128, choices=RunasPolicies.choices, default=RunasPolicies.skip,
                                    verbose_name=_('Runas policy'))
    parameters_define = models.JSONField(default=dict, verbose_name=_('Parameters define'))
    comment = models.CharField(max_length=1024, default='', verbose_name=_('Comment'), null=True, blank=True)

    @property
    def last_execution(self):
        """Most recently created execution, or None when the job never ran."""
        return self.executions.last()

    @property
    def date_last_run(self):
        """Creation time of the latest execution, or None when never run."""
        return self.last_execution.date_created if self.last_execution else None

    @property
    def summary(self):
        """Counters over all executions: {"total": ..., "success": ...}."""
        summary = {
            "total": 0,
            "success": 0,
        }
        for execution in self.executions.all():
            summary["total"] += 1
            if execution.is_success:
                summary["success"] += 1
        return summary

    @property
    def average_time_cost(self):
        """Mean wall-clock duration (seconds) of finished executions.

        Fix over the original: the finished-executions queryset is evaluated
        once instead of twice, and executions whose ``time_cost`` is None
        (missing start or finish timestamp) are skipped instead of raising
        TypeError during the sum. Returns 0 when there is nothing to average.
        """
        finished = self.executions.filter(status__in=['success', 'failed'])
        costs = [e.time_cost for e in finished if e.time_cost is not None]
        return sum(costs) / len(costs) if costs else 0

    def get_register_task(self):
        """Periodic-task registration hook required by PeriodTaskModelMixin.

        Returns (name, task, args, kwargs) for celery-beat registration.
        """
        from ..tasks import run_ops_job
        name = "run_ops_job_period_{}".format(str(self.id)[:8])
        task = run_ops_job.name
        args = (str(self.id),)
        kwargs = {}
        return name, task, args, kwargs

    @property
    def inventory(self):
        """Ansible inventory built from this job's assets and runas settings."""
        return JMSInventory(self.assets.all(), self.runas_policy, self.runas)

    def create_execution(self):
        """Create and return a fresh execution record for this job."""
        return self.executions.create()
||||
class JobExecution(JMSOrgBaseModel):
    """A single run of a :class:`Job`.

    Records parameters, timing, the raw ansible result and a summary, and
    drives the runner itself via :meth:`start`.
    """

    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    # Celery root task id of the run, set when launched inside a worker.
    task_id = models.UUIDField(null=True)
    status = models.CharField(max_length=16, verbose_name=_('Status'), default='running')
    job = models.ForeignKey(Job, on_delete=models.CASCADE, related_name='executions', null=True)
    parameters = models.JSONField(default=dict, verbose_name=_('Parameters'))
    result = models.JSONField(blank=True, null=True, verbose_name=_('Result'))
    summary = models.JSONField(default=dict, verbose_name=_('Summary'))
    creator = models.ForeignKey('users.User', verbose_name=_("Creator"), on_delete=models.SET_NULL, null=True)
    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
    date_start = models.DateTimeField(null=True, verbose_name=_('Date start'), db_index=True)
    date_finished = models.DateTimeField(null=True, verbose_name=_("Date finished"))

    def get_runner(self):
        """Build the ansible runner for this execution.

        Writes the job inventory to ``inventory_path`` first, then returns an
        AdHocRunner or PlaybookRunner depending on the job type; raises for
        unknown types.

        Fix over the original: ``parameters`` stored as a dict (the JSONField
        default type) is now passed through as extra vars; previously only a
        JSON *string* was honoured and dict parameters were silently dropped.
        """
        inv = self.job.inventory
        inv.write_to_file(self.inventory_path)
        if isinstance(self.parameters, str):
            extra_vars = json.loads(self.parameters)
        elif isinstance(self.parameters, dict):
            extra_vars = self.parameters
        else:
            extra_vars = {}

        if self.job.type == 'adhoc':
            runner = AdHocRunner(
                self.inventory_path, self.job.module, module_args=self.job.args,
                pattern="all", project_dir=self.private_dir, extra_vars=extra_vars,
            )
        elif self.job.type == 'playbook':
            runner = PlaybookRunner(
                self.inventory_path, self.job.playbook.work_path
            )
        else:
            raise Exception("unsupported job type")
        return runner

    @property
    def short_id(self):
        """Last segment of the UUID; used in human-readable directory names."""
        return str(self.id).split('-')[-1]

    @property
    def time_cost(self):
        """Duration in seconds, or None while the execution is unfinished."""
        if self.date_finished and self.date_start:
            return (self.date_finished - self.date_start).total_seconds()
        return None

    @property
    def timedelta(self):
        """Duration as a ``datetime.timedelta``, or None when unfinished."""
        if self.date_start and self.date_finished:
            return self.date_finished - self.date_start
        return None

    @property
    def is_finished(self):
        """True once the run reached a terminal status."""
        return self.status in ['success', 'failed']

    @property
    def is_success(self):
        return self.status == 'success'

    @property
    def inventory_path(self):
        """Path of the generated ansible inventory file for this run."""
        return os.path.join(self.private_dir, 'inventory', 'hosts')

    @property
    def private_dir(self):
        """Per-execution working directory under ``settings.ANSIBLE_DIR``."""
        uniq = self.date_created.strftime('%Y%m%d_%H%M%S') + '_' + self.short_id
        job_name = self.job.name if self.job.name else 'instant'
        return os.path.join(settings.ANSIBLE_DIR, job_name, uniq)

    def set_error(self, error):
        """Mark the run failed and store the error text in the summary."""
        # Re-fetch to avoid a stale / timed-out DB connection
        # (original comment: 重新获取一次,避免数据库连接超时).
        this = self.__class__.objects.get(id=self.id)
        this.status = 'failed'
        this.summary['error'] = str(error)
        this.finish_task()

    def set_result(self, cb):
        """Persist the runner callback's status, summary and raw result."""
        status_mapper = {
            'successful': 'success',
        }
        this = self.__class__.objects.get(id=self.id)
        this.status = status_mapper.get(cb.status, cb.status)
        this.summary = cb.summary
        this.result = cb.result
        this.finish_task()

    def finish_task(self):
        """Stamp the finish time and persist the terminal fields."""
        self.date_finished = timezone.now()
        self.save(update_fields=['result', 'status', 'summary', 'date_finished'])

    def set_celery_id(self):
        """Record the celery root task id when running inside a worker."""
        if not current_task:
            return
        task_id = current_task.request.root_id
        self.task_id = task_id

    def start(self, **kwargs):
        """Run the execution synchronously and return the runner callback.

        On failure the exception is logged and stored via :meth:`set_error`
        rather than propagated to the caller.
        """
        self.date_start = timezone.now()
        self.set_celery_id()
        self.save()
        runner = self.get_runner()
        try:
            cb = runner.run(**kwargs)
            self.set_result(cb)
            return cb
        except Exception as e:
            logging.error(e, exc_info=True)
            self.set_error(e)
|
|
@ -1,39 +1,19 @@
|
|||
import os.path
|
||||
import uuid
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.mixins.models import JMSOrgBaseModel
|
||||
from .base import BaseAnsibleExecution, BaseAnsibleJob
|
||||
from common.db.models import BaseCreateUpdateModel
|
||||
|
||||
|
||||
class PlaybookTemplate(JMSOrgBaseModel):
|
||||
name = models.CharField(max_length=128, verbose_name=_("Name"))
|
||||
path = models.FilePathField(verbose_name=_("Path"))
|
||||
comment = models.TextField(verbose_name=_("Comment"), blank=True)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
ordering = ['name']
|
||||
verbose_name = _("Playbook template")
|
||||
unique_together = [('org_id', 'name')]
|
||||
|
||||
|
||||
class Playbook(BaseAnsibleJob):
|
||||
path = models.FilePathField(max_length=1024, verbose_name=_("Playbook"))
|
||||
class Playbook(BaseCreateUpdateModel):
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'), null=True)
|
||||
path = models.FileField(upload_to='playbooks/')
|
||||
owner = models.ForeignKey('users.User', verbose_name=_("Owner"), on_delete=models.SET_NULL, null=True)
|
||||
comment = models.TextField(blank=True, verbose_name=_("Comment"))
|
||||
template = models.ForeignKey('PlaybookTemplate', verbose_name=_("Template"), on_delete=models.SET_NULL, null=True)
|
||||
last_execution = models.ForeignKey('PlaybookExecution', verbose_name=_("Last execution"), on_delete=models.SET_NULL, null=True, blank=True)
|
||||
|
||||
def get_register_task(self):
|
||||
name = "automation_strategy_period_{}".format(str(self.id)[:8])
|
||||
task = execute_automation_strategy.name
|
||||
args = (str(self.id), Trigger.timing)
|
||||
kwargs = {}
|
||||
return name, task, args, kwargs
|
||||
|
||||
|
||||
class PlaybookExecution(BaseAnsibleExecution):
|
||||
task = models.ForeignKey('Playbook', verbose_name=_("Task"), on_delete=models.CASCADE)
|
||||
path = models.FilePathField(max_length=1024, verbose_name=_("Run dir"))
|
||||
@property
|
||||
def work_path(self):
|
||||
return os.path.join(settings.DATA_DIR, "ops", "playbook", self.id.__str__(), "main.yaml")
|
||||
|
|
|
@ -1,11 +1,24 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
from __future__ import unicode_literals
|
||||
from rest_framework import serializers
|
||||
from django.shortcuts import reverse
|
||||
|
||||
import datetime
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import ReadableHiddenField
|
||||
from ..models import AdHoc, AdHocExecution
|
||||
|
||||
|
||||
class AdHocSerializer(serializers.ModelSerializer):
|
||||
owner = ReadableHiddenField(default=serializers.CurrentUserDefault())
|
||||
row_count = serializers.IntegerField(read_only=True)
|
||||
size = serializers.IntegerField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = AdHoc
|
||||
fields = ["id", "name", "module", "row_count", "size", "args", "owner", "date_created", "date_updated"]
|
||||
|
||||
|
||||
class AdHocExecutionSerializer(serializers.ModelSerializer):
|
||||
stat = serializers.SerializerMethodField()
|
||||
last_success = serializers.ListField(source='success_hosts')
|
||||
|
@ -49,26 +62,6 @@ class AdHocExecutionExcludeResultSerializer(AdHocExecutionSerializer):
|
|||
]
|
||||
|
||||
|
||||
class AdHocSerializer(serializers.ModelSerializer):
|
||||
tasks = serializers.ListField()
|
||||
|
||||
class Meta:
|
||||
model = AdHoc
|
||||
fields_mini = ['id']
|
||||
fields_small = fields_mini + [
|
||||
'tasks', "pattern", "args", "date_created",
|
||||
]
|
||||
fields_fk = ["last_execution"]
|
||||
fields_m2m = ["assets"]
|
||||
fields = fields_small + fields_fk + fields_m2m
|
||||
read_only_fields = [
|
||||
'date_created'
|
||||
]
|
||||
extra_kwargs = {
|
||||
"become": {'write_only': True}
|
||||
}
|
||||
|
||||
|
||||
class AdHocExecutionNestSerializer(serializers.ModelSerializer):
|
||||
last_success = serializers.ListField(source='success_hosts')
|
||||
last_failure = serializers.DictField(source='failed_hosts')
|
||||
|
@ -80,38 +73,3 @@ class AdHocExecutionNestSerializer(serializers.ModelSerializer):
|
|||
'last_success', 'last_failure', 'last_run', 'timedelta',
|
||||
'is_finished', 'is_success'
|
||||
)
|
||||
|
||||
|
||||
class AdHocDetailSerializer(AdHocSerializer):
|
||||
latest_execution = AdHocExecutionNestSerializer(allow_null=True)
|
||||
task_name = serializers.CharField(source='task.name')
|
||||
|
||||
class Meta(AdHocSerializer.Meta):
|
||||
fields = AdHocSerializer.Meta.fields + [
|
||||
'latest_execution', 'created_by', 'task_name'
|
||||
]
|
||||
|
||||
|
||||
# class CommandExecutionSerializer(serializers.ModelSerializer):
|
||||
# result = serializers.JSONField(read_only=True)
|
||||
# log_url = serializers.SerializerMethodField()
|
||||
#
|
||||
# class Meta:
|
||||
# model = CommandExecution
|
||||
# fields_mini = ['id']
|
||||
# fields_small = fields_mini + [
|
||||
# 'command', 'result', 'log_url',
|
||||
# 'is_finished', 'date_created', 'date_finished'
|
||||
# ]
|
||||
# fields_m2m = ['hosts']
|
||||
# fields = fields_small + fields_m2m
|
||||
# read_only_fields = [
|
||||
# 'result', 'is_finished', 'log_url', 'date_created',
|
||||
# 'date_finished'
|
||||
# ]
|
||||
# ref_name = 'OpsCommandExecution'
|
||||
#
|
||||
# @staticmethod
|
||||
# def get_log_url(obj):
|
||||
# return reverse('api-ops:celery-task-log', kwargs={'pk': obj.id})
|
||||
|
||||
|
|
|
@ -30,14 +30,15 @@ class CeleryPeriodTaskSerializer(serializers.ModelSerializer):
|
|||
class CeleryTaskSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CeleryTask
|
||||
fields = [
|
||||
'id', 'name', 'meta', 'state', 'last_published_time',
|
||||
]
|
||||
read_only_fields = ['id', 'name', 'meta', 'summary', 'state', 'last_published_time']
|
||||
fields = read_only_fields
|
||||
|
||||
|
||||
class CeleryTaskExecutionSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = CeleryTaskExecution
|
||||
fields = [
|
||||
"id", "name", "args", "kwargs", "state", "is_finished", "date_published", "date_start", "date_finished"
|
||||
"id", "name", "args", "kwargs", "time_cost", "timedelta", "state", "is_finished", "date_published",
|
||||
"date_start",
|
||||
"date_finished"
|
||||
]
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
from rest_framework import serializers
|
||||
from common.drf.fields import ReadableHiddenField
|
||||
from ops.mixin import PeriodTaskSerializerMixin
|
||||
from ops.models import Job, JobExecution
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
|
||||
# NOTE(review): almost certainly a typo for ``__all__`` — as written ``_all_``
# has no special meaning. Left unchanged because renaming it to ``__all__ = []``
# would hide every serializer from ``from ... import *``; confirm intent first.
_all_ = []
|
||||
|
||||
|
||||
class JobSerializer(BulkOrgResourceModelSerializer, PeriodTaskSerializerMixin):
    """Serializer for :class:`ops.models.Job` with org-aware bulk support and
    periodic-task fields (``is_periodic`` / ``interval`` / ``crontab``).
    """

    # Hidden on input (defaults to the requesting user), readable on output.
    owner = ReadableHiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = Job
        # Model properties (date_last_run, average_time_cost) and timestamps
        # are exposed read-only.
        read_only_fields = ["id", "date_last_run", "date_created", "date_updated", "average_time_cost"]
        fields = read_only_fields + [
            "name", "instant", "type", "module", "args", "playbook", "assets", "runas_policy", "runas", "owner",
            "parameters_define",
            "timeout",
            "chdir",
            "comment",
            "summary",
            "is_periodic", "interval", "crontab"
        ]
|
||||
|
||||
|
||||
class JobExecutionSerializer(serializers.ModelSerializer):
    """Serializer for :class:`ops.models.JobExecution`.

    Only ``job`` and ``parameters`` are writable; timing, status and id
    fields are produced by the execution itself.
    """

    class Meta:
        model = JobExecution
        # Fix: the original listed 'task_id' twice; each field appears once.
        read_only_fields = [
            "id", "task_id", "timedelta", "time_cost", "is_finished",
            "date_start", "date_created", "is_success", "short_id",
        ]
        fields = read_only_fields + [
            "job", "parameters"
        ]
|
|
@ -0,0 +1,28 @@
|
|||
import os
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import ReadableHiddenField
|
||||
from ops.models import Playbook
|
||||
|
||||
|
||||
def parse_playbook_name(path):
    """Derive a playbook name from a file path: the base filename with its
    final extension stripped, e.g. ``playbooks/deploy.yaml`` -> ``deploy``.

    Fix over the original ``file_name.split(".")[-2]``: dotted names are kept
    intact (``site.v2.yml`` -> ``site.v2``) and a name without any extension
    no longer raises IndexError.
    """
    file_name = os.path.basename(path)
    return os.path.splitext(file_name)[0]
|
||||
|
||||
|
||||
class PlaybookSerializer(serializers.ModelSerializer):
    """Serializer for uploaded playbooks.

    When no explicit name is supplied on creation, one is derived from the
    uploaded file's name via ``parse_playbook_name``.
    """

    # Hidden on input (defaults to the requesting user), readable on output.
    owner = ReadableHiddenField(default=serializers.CurrentUserDefault())

    def create(self, validated_data):
        # Fill in a default name from the uploaded file when none was given.
        if not validated_data.get('name'):
            uploaded_name = validated_data.get('path').name
            validated_data['name'] = parse_playbook_name(uploaded_name)
        return super().create(validated_data)

    class Meta:
        model = Playbook
        fields = [
            "id", "name", "path", "date_created", "owner", "date_updated"
        ]
|
|
@ -1,14 +1,16 @@
|
|||
import ast
|
||||
from celery import signals
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.cache import cache
|
||||
from django.dispatch import receiver
|
||||
from django.db.utils import ProgrammingError
|
||||
from django.utils import translation, timezone
|
||||
from django.utils.translation import gettext as _
|
||||
from django.core.cache import cache
|
||||
from celery import signals, current_app
|
||||
|
||||
from common.db.utils import close_old_connections, get_logger
|
||||
from common.signals import django_ready
|
||||
from common.db.utils import close_old_connections, get_logger
|
||||
|
||||
from .celery import app
|
||||
from .models import CeleryTaskExecution, CeleryTask
|
||||
|
||||
|
@ -23,15 +25,15 @@ def sync_registered_tasks(*args, **kwargs):
|
|||
with transaction.atomic():
|
||||
try:
|
||||
db_tasks = CeleryTask.objects.all()
|
||||
except Exception as e:
|
||||
return
|
||||
celery_task_names = [key for key in app.tasks]
|
||||
db_task_names = db_tasks.values_list('name', flat=True)
|
||||
celery_task_names = [key for key in app.tasks]
|
||||
db_task_names = db_tasks.values_list('name', flat=True)
|
||||
|
||||
db_tasks.exclude(name__in=celery_task_names).delete()
|
||||
not_in_db_tasks = set(celery_task_names) - set(db_task_names)
|
||||
tasks_to_create = [CeleryTask(name=name) for name in not_in_db_tasks]
|
||||
CeleryTask.objects.bulk_create(tasks_to_create)
|
||||
db_tasks.exclude(name__in=celery_task_names).delete()
|
||||
not_in_db_tasks = set(celery_task_names) - set(db_task_names)
|
||||
tasks_to_create = [CeleryTask(name=name) for name in not_in_db_tasks]
|
||||
CeleryTask.objects.bulk_create(tasks_to_create)
|
||||
except ProgrammingError:
|
||||
pass
|
||||
|
||||
|
||||
@signals.before_task_publish.connect
|
||||
|
@ -45,7 +47,7 @@ def before_task_publish(headers=None, **kwargs):
|
|||
@signals.task_prerun.connect
|
||||
def on_celery_task_pre_run(task_id='', **kwargs):
|
||||
# 更新状态
|
||||
CeleryTaskExecution.objects.filter(id=task_id)\
|
||||
CeleryTaskExecution.objects.filter(id=task_id) \
|
||||
.update(state='RUNNING', date_start=timezone.now())
|
||||
# 关闭之前的数据库连接
|
||||
close_old_connections()
|
||||
|
|
|
@ -1,18 +1,16 @@
|
|||
# coding: utf-8
|
||||
import os
|
||||
import random
|
||||
import subprocess
|
||||
|
||||
from django.conf import settings
|
||||
from celery import shared_task, subtask
|
||||
from celery import signals
|
||||
from celery import shared_task
|
||||
|
||||
from celery.exceptions import SoftTimeLimitExceeded
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _, gettext
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger, get_object_or_none, get_log_keep_day
|
||||
from orgs.utils import tmp_to_root_org, tmp_to_org
|
||||
from orgs.utils import tmp_to_org
|
||||
from .celery.decorator import (
|
||||
register_as_period_task, after_app_shutdown_clean_periodic,
|
||||
after_app_ready_start
|
||||
|
@ -21,32 +19,19 @@ from .celery.utils import (
|
|||
create_or_update_celery_periodic_tasks, get_celery_periodic_task,
|
||||
disable_celery_periodic_task, delete_celery_periodic_task
|
||||
)
|
||||
from .models import CeleryTaskExecution, AdHoc, Playbook
|
||||
from .models import CeleryTaskExecution, Job, JobExecution
|
||||
from .notifications import ServerPerformanceCheckUtil
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
def rerun_task():
|
||||
pass
|
||||
|
||||
|
||||
@shared_task(soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task"))
|
||||
def run_adhoc(tid, **kwargs):
|
||||
"""
|
||||
:param tid: is the tasks serialized data
|
||||
:param callback: callback function name
|
||||
:return:
|
||||
"""
|
||||
with tmp_to_root_org():
|
||||
task = get_object_or_none(AdHoc, id=tid)
|
||||
if not task:
|
||||
logger.error("No task found")
|
||||
return
|
||||
with tmp_to_org(task.org):
|
||||
execution = task.create_execution()
|
||||
def run_ops_job(job_id):
|
||||
job = get_object_or_none(Job, id=job_id)
|
||||
with tmp_to_org(job.org):
|
||||
execution = job.create_execution()
|
||||
try:
|
||||
execution.start(**kwargs)
|
||||
execution.start()
|
||||
except SoftTimeLimitExceeded:
|
||||
execution.set_error('Run timeout')
|
||||
logger.error("Run adhoc timeout")
|
||||
|
@ -55,40 +40,21 @@ def run_adhoc(tid, **kwargs):
|
|||
logger.error("Start adhoc execution error: {}".format(e))
|
||||
|
||||
|
||||
@shared_task(soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible command"))
|
||||
def run_playbook(pid, **kwargs):
|
||||
with tmp_to_root_org():
|
||||
task = get_object_or_none(Playbook, id=pid)
|
||||
if not task:
|
||||
logger.error("No task found")
|
||||
return
|
||||
|
||||
with tmp_to_org(task.org):
|
||||
execution = task.create_execution()
|
||||
@shared_task(soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task execution"))
|
||||
def run_ops_job_executions(execution_id, **kwargs):
|
||||
execution = get_object_or_none(JobExecution, id=execution_id)
|
||||
with tmp_to_org(execution.org):
|
||||
try:
|
||||
execution.start(**kwargs)
|
||||
execution.start()
|
||||
except SoftTimeLimitExceeded:
|
||||
execution.set_error('Run timeout')
|
||||
logger.error("Run playbook timeout")
|
||||
logger.error("Run adhoc timeout")
|
||||
except Exception as e:
|
||||
execution.set_error(e)
|
||||
logger.error("Run playbook execution error: {}".format(e))
|
||||
logger.error("Start adhoc execution error: {}".format(e))
|
||||
|
||||
|
||||
@shared_task
|
||||
@after_app_shutdown_clean_periodic
|
||||
@register_as_period_task(interval=3600 * 24, description=_("Clean task history period"))
|
||||
def clean_tasks_adhoc_period():
|
||||
logger.debug("Start clean task adhoc and run history")
|
||||
tasks = Task.objects.all()
|
||||
for task in tasks:
|
||||
adhoc = task.adhoc.all().order_by('-date_created')[5:]
|
||||
for ad in adhoc:
|
||||
ad.execution.all().delete()
|
||||
ad.delete()
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Periodic clear celery tasks'))
|
||||
@after_app_shutdown_clean_periodic
|
||||
@register_as_period_task(interval=3600 * 24, description=_("Clean celery log period"))
|
||||
def clean_celery_tasks_period():
|
||||
|
@ -107,7 +73,7 @@ def clean_celery_tasks_period():
|
|||
subprocess.call(command, shell=True)
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Clear celery periodic tasks'))
|
||||
@after_app_ready_start
|
||||
def clean_celery_periodic_tasks():
|
||||
"""清除celery定时任务"""
|
||||
|
@ -130,7 +96,7 @@ def clean_celery_periodic_tasks():
|
|||
logger.info('Clean task failure: {}'.format(task))
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_('Create or update periodic tasks'))
|
||||
@after_app_ready_start
|
||||
def create_or_update_registered_periodic_tasks():
|
||||
from .celery.decorator import get_register_period_tasks
|
||||
|
@ -138,37 +104,7 @@ def create_or_update_registered_periodic_tasks():
|
|||
create_or_update_celery_periodic_tasks(task)
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(verbose_name=_("Periodic check service performance"))
|
||||
@register_as_period_task(interval=3600)
|
||||
def check_server_performance_period():
|
||||
ServerPerformanceCheckUtil().check_and_publish()
|
||||
|
||||
|
||||
@shared_task(verbose_name=_("Hello"), comment="an test shared task")
|
||||
def hello(name, callback=None):
|
||||
from users.models import User
|
||||
import time
|
||||
|
||||
count = User.objects.count()
|
||||
print(gettext("Hello") + ': ' + name)
|
||||
print("Count: ", count)
|
||||
time.sleep(1)
|
||||
return gettext("Hello")
|
||||
|
||||
|
||||
@shared_task(verbose_name="Hello Error", comment="an test shared task error")
|
||||
def hello_error():
|
||||
raise Exception("must be error")
|
||||
|
||||
|
||||
@shared_task(verbose_name="Hello Random", comment="some time error and some time success")
|
||||
def hello_random():
|
||||
i = random.randint(0, 1)
|
||||
if i == 1:
|
||||
raise Exception("must be error")
|
||||
|
||||
|
||||
@shared_task
|
||||
def hello_callback(result):
|
||||
print(result)
|
||||
print("Hello callback")
|
||||
|
|
|
@ -4,7 +4,6 @@ from __future__ import unicode_literals
|
|||
from django.urls import path
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from rest_framework_bulk.routes import BulkRouter
|
||||
from rest_framework_nested import routers
|
||||
|
||||
from .. import api
|
||||
|
||||
|
@ -13,23 +12,25 @@ app_name = "ops"
|
|||
router = DefaultRouter()
|
||||
bulk_router = BulkRouter()
|
||||
|
||||
router.register(r'adhoc', api.AdHocViewSet, 'adhoc')
|
||||
router.register(r'adhoc-executions', api.AdHocExecutionViewSet, 'execution')
|
||||
router.register(r'adhocs', api.AdHocViewSet, 'adhoc')
|
||||
router.register(r'playbooks', api.PlaybookViewSet, 'playbook')
|
||||
router.register(r'jobs', api.JobViewSet, 'job')
|
||||
router.register(r'job-executions', api.JobExecutionViewSet, 'job-execution')
|
||||
|
||||
router.register(r'celery/period-tasks', api.CeleryPeriodTaskViewSet, 'celery-period-task')
|
||||
|
||||
router.register(r'tasks', api.CeleryTaskViewSet, 'task')
|
||||
|
||||
task_router = routers.NestedDefaultRouter(router, r'tasks', lookup='task')
|
||||
task_router.register(r'executions', api.CeleryTaskExecutionViewSet, 'task-execution')
|
||||
router.register(r'task-executions', api.CeleryTaskExecutionViewSet, 'task-executions')
|
||||
|
||||
urlpatterns = [
|
||||
|
||||
path('ansible/job-execution/<uuid:pk>/log/', api.AnsibleTaskLogApi.as_view(), name='job-execution-log'),
|
||||
|
||||
path('celery/task/<uuid:name>/task-execution/<uuid:pk>/log/', api.CeleryTaskExecutionLogApi.as_view(),
|
||||
name='celery-task-execution-log'),
|
||||
path('celery/task/<uuid:name>/task-execution/<uuid:pk>/result/', api.CeleryResultApi.as_view(),
|
||||
name='celery-task-execution-result'),
|
||||
|
||||
path('ansible/task-execution/<uuid:pk>/log/', api.AnsibleTaskLogApi.as_view(), name='ansible-task-log'),
|
||||
]
|
||||
|
||||
urlpatterns += (router.urls + bulk_router.urls + task_router.urls)
|
||||
urlpatterns += (router.urls + bulk_router.urls)
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue