Merge pull request #15412 from jumpserver/dev

v4.10.0
Bryan 2025-05-15 17:11:43 +08:00 committed by GitHub
commit d0cb9e5432
192 changed files with 21939 additions and 14001 deletions

View File

@ -8,4 +8,6 @@ celerybeat.pid
.vagrant/
apps/xpack/.git
.history/
.idea
.idea
.venv/
.env

.gitattributes vendored
View File

@ -1,4 +0,0 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
leak_passwords.db filter=lfs diff=lfs merge=lfs -text

View File

@ -1,10 +1,14 @@
version: 2
updates:
- package-ecosystem: "pip"
- package-ecosystem: "uv"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "09:30"
timezone: "Asia/Shanghai"
target-branch: dev
target-branch: dev
groups:
python-dependencies:
patterns:
- "*"

.gitignore vendored
View File

@ -46,3 +46,6 @@ test.py
.test/
*.mo
apps.iml
*.db
*.mmdb
*.ipdb

View File

@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250415_032719 AS stage-build
FROM jumpserver/core-base:20250509_094529 AS stage-build
ARG VERSION

View File

@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
ca-certificates \
@ -43,18 +43,19 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
ENV UV_LINK_MODE=copy
RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=poetry.lock,target=poetry.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
--mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
set -ex \
&& python3 -m venv /opt/py3 \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& . /opt/py3/bin/activate \
&& poetry config virtualenvs.create false \
&& poetry install --no-cache --only main \
&& ansible-galaxy collection install -r collections.yml --force --ignore-certs \
&& bash clean_site_packages.sh \
&& poetry cache clear pypi --all
&& uv venv \
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
&& ln -sf $(pwd)/.venv /opt/py3 \
&& bash utils/static_files.sh \
&& bash clean_site_packages.sh

View File

@ -24,11 +24,7 @@ RUN set -ex \
WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \
&& . /opt/py3/bin/activate \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& poetry install --only xpack \
&& poetry cache clear pypi --all
RUN set -ex \
&& uv pip install -i${PIP_MIRROR} --group xpack

View File

@ -5,6 +5,8 @@
## An open-source PAM tool (Bastion Host)
[![][license-shield]][license-link]
[![][docs-shield]][docs-link]
[![][deepwiki-shield]][deepwiki-link]
[![][discord-shield]][discord-link]
[![][docker-shield]][docker-link]
[![][github-release-shield]][github-release-link]
@ -101,6 +103,7 @@ Unless required by applicable law or agreed to in writing, software distributed
<!-- JumpServer official link -->
[docs-link]: https://jumpserver.com/docs
[discord-link]: https://discord.com/invite/W6vYXmAQG2
[deepwiki-link]: https://deepwiki.com/jumpserver/jumpserver/
[contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md
<!-- JumpServer Other link-->
@ -111,8 +114,10 @@ Unless required by applicable law or agreed to in writing, software distributed
[github-issues-link]: https://github.com/jumpserver/jumpserver/issues
<!-- Shield link-->
[docs-shield]: https://img.shields.io/badge/documentation-148F76
[github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square   
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[deepwiki-shield]: https://img.shields.io/badge/deepwiki-devin?color=blue
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb

View File

@ -62,8 +62,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
)
def get_once_secret(self, request, *args, **kwargs):
instance = self.get_object()
secret = instance.get_secret()
return Response(data={'id': instance.id, 'secret': secret})
return Response(data={'id': instance.id, 'secret': instance.secret})
@action(['GET'], detail=False, url_path='account-secret',
permission_classes=[RBACPermission])

View File

@ -10,7 +10,7 @@ from accounts.models import BaseAccountQuerySet
from accounts.utils import SecretGenerator
from assets.automations.base.manager import BasePlaybookManager
from assets.const import HostTypes
from common.db.utils import safe_db_connection
from common.db.utils import safe_atomic_db_connection
from common.utils import get_logger
logger = get_logger(__name__)
@ -170,7 +170,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
)
super().on_host_success(host, result)
with safe_db_connection():
with safe_atomic_db_connection():
account.save(update_fields=['secret', 'date_updated', 'date_change_secret', 'change_secret_status'])
self.save_record(recorder)
@ -198,6 +198,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
)
super().on_host_error(host, error, result)
with safe_db_connection():
with safe_atomic_db_connection():
account.save(update_fields=['change_secret_status', 'date_change_secret', 'date_updated'])
self.save_record(recorder)

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2805a0264fc07ae597704841ab060edef8bf74654f525bc778cb9195d8cad0e
size 2547712

View File

@ -12,6 +12,7 @@ from accounts.models import Account, AccountRisk, RiskChoice
from assets.automations.base.manager import BaseManager
from common.const import ConfirmOrIgnore
from common.decorators import bulk_create_decorator, bulk_update_decorator
from settings.models import LeakPasswords
@bulk_create_decorator(AccountRisk)
@ -157,10 +158,8 @@ class CheckLeakHandler(BaseCheckHandler):
if not account.secret:
return False
sql = 'SELECT 1 FROM passwords WHERE password = ? LIMIT 1'
self.cursor.execute(sql, (account.secret,))
leak = self.cursor.fetchone() is not None
return leak
is_exist = LeakPasswords.objects.using('sqlite').filter(password=account.secret).exists()
return is_exist
def clean(self):
self.cursor.close()
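Note: the leak check now goes through the ORM instead of a raw cursor. A minimal sketch of the database routing this relies on, assuming a DATABASES alias named 'sqlite' (the alias name comes from the diff; the engine and path below are illustrative):

# settings sketch: secondary database alias backing the LeakPasswords model
DATABASES = {
    'default': {'ENGINE': 'django.db.backends.mysql', 'NAME': 'jumpserver'},  # illustrative main DB
    'sqlite': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': '/opt/jumpserver/data/leak_passwords.db',  # illustrative path to the leak-password DB
    },
}

# the check itself, routed explicitly to that alias
is_exist = LeakPasswords.objects.using('sqlite').filter(password=account.secret).exists()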

View File

@ -85,6 +85,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
def on_host_error(self, host, error, result):
account = self.host_account_mapper.get(host)
try:
account.set_connectivity(Connectivity.ERR)
error_tp = account.get_err_connectivity(error)
account.set_connectivity(error_tp)
except Exception as e:
print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n')

View File

@ -629,10 +629,15 @@ class Migration(migrations.Migration):
name="connectivity",
field=models.CharField(
choices=[
("-", "Unknown"),
("na", "N/A"),
("ok", "OK"),
("err", "Error"),
('-', 'Unknown'),
('na', 'N/A'),
('ok', 'OK'),
('err', 'Error'),
('auth_err', 'Authentication error'),
('password_err', 'Invalid password error'),
('openssh_key_err', 'OpenSSH key error'),
('ntlm_err', 'NTLM credentials rejected error'),
('create_temp_err', 'Create temporary error')
],
default="-",
max_length=16,

View File

@ -0,0 +1,29 @@
# Generated by Django 4.1.13 on 2025-05-06 10:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0006_alter_accountrisk_username_and_more'),
]
operations = [
migrations.AlterField(
model_name='account',
name='connectivity',
field=models.CharField(choices=[
('-', 'Unknown'),
('na', 'N/A'),
('ok', 'OK'),
('err', 'Error'),
('rdp_err', 'RDP error'),
('auth_err', 'Authentication error'),
('password_err', 'Invalid password error'),
('openssh_key_err', 'OpenSSH key error'),
('ntlm_err', 'NTLM credentials rejected error'),
('create_temp_err', 'Create temporary error')
],
default='-', max_length=16, verbose_name='Connectivity'),
),
]

View File

@ -166,9 +166,12 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.ds.domain_name
return ''
def username_has_domain(self):
return '@' in self.username or '\\' in self.username
@property
def full_username(self):
if self.ds_domain:
if not self.username_has_domain() and self.ds_domain:
return '{}@{}'.format(self.username, self.ds_domain)
return self.username
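A standalone sketch of the new behaviour: usernames that already carry a domain (either the '@' or the backslash form) are left untouched; the domain value is hypothetical.

def full_username(username, ds_domain):
    # mirrors Account.full_username after this change (plain function, not the model)
    has_domain = '@' in username or '\\' in username
    if not has_domain and ds_domain:
        return '{}@{}'.format(username, ds_domain)
    return username

print(full_username('alice', 'corp.example.com'))                 # alice@corp.example.com
print(full_username('bob@corp.example.com', 'corp.example.com'))  # unchanged, already qualified
print(full_username('CORP\\carol', 'corp.example.com'))           # unchanged, backslash form counts as qualified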

View File

@ -5,6 +5,7 @@ from rest_framework import serializers
from accounts.models import IntegrationApplication
from acls.serializers.rules import ip_group_child_validator, ip_group_help_text
from common.serializers.fields import JSONManyToManyField
from common.utils import random_string
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
@ -37,6 +38,10 @@ class IntegrationApplicationSerializer(BulkOrgResourceModelSerializer):
data['logo'] = static('img/logo.png')
return data
def validate(self, attrs):
attrs['secret'] = random_string(36)
return attrs
class IntegrationAccountSecretSerializer(serializers.Serializer):
asset = serializers.CharField(required=False, allow_blank=True)

View File

@ -107,16 +107,18 @@ def execute_automation_record_task(record_ids, tp):
)
@register_as_period_task(crontab=CRONTAB_AT_AM_THREE)
def clean_change_secret_and_push_record_period():
from accounts.models import ChangeSecretRecord
from accounts.models import ChangeSecretRecord, PushSecretRecord
print('Start clean change secret and push record period')
with tmp_to_root_org():
now = timezone.now()
days = get_log_keep_day('ACCOUNT_CHANGE_SECRET_RECORD_KEEP_DAYS')
expired_day = now - datetime.timedelta(days=days)
records = ChangeSecretRecord.objects.filter(
date_updated__lt=expired_day
).filter(
Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
)
expired_time = now - datetime.timedelta(days=days)
records.delete()
null_related_q = Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
expired_q = Q(date_updated__lt=expired_time)
ChangeSecretRecord.objects.filter(null_related_q).delete()
ChangeSecretRecord.objects.filter(expired_q).delete()
PushSecretRecord.objects.filter(null_related_q).delete()
PushSecretRecord.objects.filter(expired_q).delete()

View File

@ -32,9 +32,9 @@ class CommandFilterACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
class Meta(BaseSerializer.Meta):
model = CommandFilterACL
fields = BaseSerializer.Meta.fields + ['command_groups']
action_choices_exclude = [ActionChoices.notice,
ActionChoices.face_verify,
ActionChoices.face_online]
action_choices_exclude = [
ActionChoices.notice, ActionChoices.face_verify, ActionChoices.face_online
]
class CommandReviewSerializer(serializers.Serializer):

View File

@ -14,5 +14,6 @@ class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
if i not in ['assets', 'accounts']
]
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
ActionChoices.review, ActionChoices.accept, ActionChoices.notice
ActionChoices.review, ActionChoices.accept, ActionChoices.notice,
ActionChoices.face_verify, ActionChoices.face_online
]

View File

@ -1,10 +1,10 @@
from .asset import *
from .category import *
from .domain import *
from .favorite_asset import *
from .mixin import *
from .my_asset import *
from .node import *
from .platform import *
from .protocol import *
from .tree import *
from .my_asset import *
from .zone import *

View File

@ -37,12 +37,12 @@ class AssetFilterSet(BaseFilterSet):
platform = drf_filters.CharFilter(method='filter_platform')
is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
domain = drf_filters.CharFilter(method='filter_domain')
zone = drf_filters.CharFilter(method='filter_zone')
type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
protocols = drf_filters.CharFilter(method='filter_protocols')
domain_enabled = drf_filters.BooleanFilter(
field_name="platform__domain_enabled", lookup_expr="exact"
gateway_enabled = drf_filters.BooleanFilter(
field_name="platform__gateway_enabled", lookup_expr="exact"
)
ping_enabled = drf_filters.BooleanFilter(
field_name="platform__automation__ping_enabled", lookup_expr="exact"
@ -85,11 +85,11 @@ class AssetFilterSet(BaseFilterSet):
return queryset
@staticmethod
def filter_domain(queryset, name, value):
def filter_zone(queryset, name, value):
if is_uuid(value):
return queryset.filter(domain_id=value)
return queryset.filter(zone_id=value)
else:
return queryset.filter(domain__name__contains=value)
return queryset.filter(zone__name__contains=value)
@staticmethod
def filter_protocols(queryset, name, value):
@ -171,10 +171,10 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
@action(methods=["GET"], detail=True, url_path="gateways")
def gateways(self, *args, **kwargs):
asset = self.get_object()
if not asset.domain:
if not asset.zone:
gateways = Gateway.objects.none()
else:
gateways = asset.domain.gateways
gateways = asset.zone.gateways
return self.get_paginated_response_from_queryset(gateways)
@action(methods=['post'], detail=False, url_path='sync-platform-protocols')

View File

@ -9,24 +9,24 @@ from common.utils import get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from .asset import HostViewSet
from .. import serializers
from ..models import Domain, Gateway
from ..models import Zone, Gateway
logger = get_logger(__file__)
__all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
__all__ = ['ZoneViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
class DomainViewSet(OrgBulkModelViewSet):
model = Domain
class ZoneViewSet(OrgBulkModelViewSet):
model = Zone
filterset_fields = ("name",)
search_fields = filterset_fields
serializer_classes = {
'default': serializers.DomainSerializer,
'list': serializers.DomainListSerializer,
'default': serializers.ZoneSerializer,
'list': serializers.ZoneListSerializer,
}
def get_serializer_class(self):
if self.request.query_params.get('gateway'):
return serializers.DomainWithGatewaySerializer
return serializers.ZoneWithGatewaySerializer
return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs):
@ -36,8 +36,8 @@ class DomainViewSet(OrgBulkModelViewSet):
class GatewayViewSet(HostViewSet):
perm_model = Gateway
filterset_fields = ("domain__name", "name", "domain")
search_fields = ("domain__name",)
filterset_fields = ("zone__name", "name", "zone")
search_fields = ("zone__name",)
def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes()
@ -45,7 +45,7 @@ class GatewayViewSet(HostViewSet):
return serializer_classes
def get_queryset(self):
queryset = Domain.get_gateway_queryset()
queryset = Zone.get_gateway_queryset()
return queryset
@ -55,7 +55,7 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
}
def get_queryset(self):
queryset = Domain.get_gateway_queryset()
queryset = Zone.get_gateway_queryset()
return queryset
def post(self, request, *args, **kwargs):

View File

@ -17,7 +17,7 @@ from sshtunnel import SSHTunnelForwarder
from assets.automations.methods import platform_automation_methods
from common.const import Status
from common.db.utils import safe_db_connection
from common.db.utils import safe_atomic_db_connection
from common.tasks import send_mail_async
from common.utils import get_logger, lazyproperty, is_openssh_format_key, ssh_pubkey_gen
from ops.ansible import JMSInventory, DefaultCallback, SuperPlaybookRunner
@ -123,7 +123,7 @@ class BaseManager:
self.execution.result = self.result
self.execution.status = self.status
with safe_db_connection():
with safe_atomic_db_connection():
self.execution.save()
def print_summary(self):

View File

@ -1,3 +1,5 @@
from collections import Counter
__all__ = ['FormatAssetInfo']
@ -7,13 +9,37 @@ class FormatAssetInfo:
self.tp = tp
@staticmethod
def posix_format(info):
for cpu_model in info.get('cpu_model', []):
if cpu_model.endswith('GHz') or cpu_model.startswith("Intel"):
break
else:
cpu_model = ''
info['cpu_model'] = cpu_model[:48]
def get_cpu_model_count(cpus):
try:
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
model_counts = Counter(models)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing CPU model list: {e}")
result = ''
return result
@staticmethod
def get_gpu_model_count(gpus):
try:
model_counts = Counter(gpus)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing GPU model list: {e}")
result = ''
return result
def posix_format(self, info):
cpus = self.get_cpu_model_count(info.get('cpu_model', []))
gpus = self.get_gpu_model_count(info.get('gpu_model', []))
info['gpu_model'] = gpus
info['cpu_model'] = cpus
info['cpu_count'] = info.get('cpu_count', 0)
return info
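The stride-of-three grouping matches the layout of Ansible's ansible_processor fact (index, vendor, model triples); GPU lines come from the nvidia-smi task added below. A standalone sketch with hypothetical values:

from collections import Counter

cpus = ['0', 'GenuineIntel', 'Intel(R) Xeon(R) Gold 6133 @ 2.50GHz',
        '1', 'GenuineIntel', 'Intel(R) Xeon(R) Gold 6133 @ 2.50GHz']
models = [cpus[i + 1] + ' ' + cpus[i + 2] for i in range(0, len(cpus), 3)]
print(', '.join(f'{m} x{c}' for m, c in Counter(models).items()))
# GenuineIntel Intel(R) Xeon(R) Gold 6133 @ 2.50GHz x2

gpus = ['NVIDIA A100, 40960, 550.54'] * 2  # nvidia-smi CSV lines, hypothetical
print(', '.join(f'{m} x{c}' for m, c in Counter(gpus).items()))
# NVIDIA A100, 40960, 550.54 x2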

View File

@ -23,5 +23,16 @@
arch: "{{ ansible_architecture }}"
kernel: "{{ ansible_kernel }}"
- name: Get GPU info with nvidia-smi
shell: |
nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv,noheader,nounits
register: gpu_info
ignore_errors: yes
- name: Merge GPU info into final info
set_fact:
info: "{{ info | combine({'gpu_model': gpu_info.stdout_lines | default([])}) }}"
- debug:
var: info

View File

@ -37,10 +37,11 @@ class PingManager(BasePlaybookManager):
def on_host_error(self, host, error, result):
asset, account = self.host_asset_and_account_mapper.get(host)
try:
asset.set_connectivity(Connectivity.ERR)
error_tp = asset.get_err_connectivity(error)
asset.set_connectivity(error_tp)
if not account:
return
account.set_connectivity(Connectivity.ERR)
account.set_connectivity(error_tp)
except Exception as e:
print(f'\033[31m Update account {account.name} or '
f'update asset {asset.name} connectivity failed: {e} \033[0m\n')

View File

@ -7,6 +7,12 @@ class Connectivity(TextChoices):
NA = 'na', _('N/A')
OK = 'ok', _('OK')
ERR = 'err', _('Error')
RDP_ERR = 'rdp_err', _('RDP error')
AUTH_ERR = 'auth_err', _('Authentication error')
PASSWORD_ERR = 'password_err', _('Invalid password error')
OPENSSH_KEY_ERR = 'openssh_key_err', _('OpenSSH key error')
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
CREATE_TEMPORARY_ERR = 'create_temp_err', _('Create temporary error')
class AutomationTypes(TextChoices):

View File

@ -37,7 +37,7 @@ class FillType(models.TextChoices):
class BaseType(TextChoices):
"""
Constraints should reflect per-platform restrictions, to avoid superfluous options: e.g. mysql enabling ssh,
or options that take no effect even when enabled, e.g. k8s enabling domain is not yet supported
or options that take no effect even when enabled, e.g. k8s enabling gateway is not yet supported
"""
@classmethod

View File

@ -13,11 +13,11 @@ class CloudTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
},
cls.K8S: {
'domain_enabled': True,
'gateway_enabled': True,
}
}

View File

@ -20,7 +20,7 @@ class CustomTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
},
}

View File

@ -20,7 +20,7 @@ class DatabaseTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': True,
'gateway_enabled': True,
'su_enabled': False,
}
}

View File

@ -19,8 +19,8 @@ class DeviceTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': True,
'ds_enabled': False,
'gateway_enabled': True,
'ds_enabled': True,
'su_enabled': True,
'su_methods': ['enable', 'super', 'super_level']
}

View File

@ -16,7 +16,7 @@ class DirectoryTypes(BaseType):
return {
'*': {
'charset_enabled': True,
'domain_enabled': True,
'gateway_enabled': True,
'ds_enabled': False,
'su_enabled': True,
},

View File

@ -11,7 +11,7 @@ class GPTTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
}
}

View File

@ -18,7 +18,7 @@ class HostTypes(BaseType):
'*': {
'charset_enabled': True,
'charset': 'utf-8', # default
'domain_enabled': True,
'gateway_enabled': True,
'su_enabled': True,
'ds_enabled': True,
'su_methods': ['sudo', 'su', 'only_sudo', 'only_su'],
@ -81,7 +81,7 @@ class HostTypes(BaseType):
{'name': 'Linux'},
{
'name': GATEWAY_NAME,
'domain_enabled': True,
'gateway_enabled': True,
}
],
cls.UNIX: [

View File

@ -344,6 +344,20 @@ class Protocol(ChoicesMixin, models.TextChoices):
if not xpack_enabled and config.get('xpack', False):
continue
protocols.append(protocol)
from assets.models.platform import PlatformProtocol
custom_protocols = (
PlatformProtocol.objects
.filter(platform__category='custom')
.values_list('name', flat=True)
.distinct()
)
for protocol in custom_protocols:
if protocol not in protocols:
if not protocol:
continue
label = protocol[0].upper() + protocol[1:]
protocols.append({'label': label, 'value': protocol})
return protocols
@classmethod

View File

@ -312,7 +312,7 @@ class AllTypes(ChoicesMixin):
'category': category,
'type': tp, 'internal': True,
'charset': constraints.get('charset', 'utf-8'),
'domain_enabled': constraints.get('domain_enabled', False),
'gateway_enabled': constraints.get('gateway_enabled', False),
'su_enabled': constraints.get('su_enabled', False),
}
if data['su_enabled'] and data.get('su_methods'):

View File

@ -11,7 +11,7 @@ class WebTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
}
}

View File

@ -29,8 +29,19 @@ class Migration(migrations.Migration):
('org_id',
models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
('connectivity',
models.CharField(choices=[('-', 'Unknown'), ('na', 'N/A'), ('ok', 'OK'), ('err', 'Error')],
default='-', max_length=16, verbose_name='Connectivity')),
models.CharField(
choices=[
('-', 'Unknown'),
('na', 'N/A'),
('ok', 'OK'),
('err', 'Error'),
('auth_err', 'Authentication error'),
('password_err', 'Invalid password error'),
('openssh_key_err', 'OpenSSH key error'),
('ntlm_err', 'NTLM credentials rejected error'),
('create_temp_err', 'Create temporary error')
],
default='-', max_length=16, verbose_name='Connectivity')),
('date_verified', models.DateTimeField(null=True, verbose_name='Date verified')),
('name', models.CharField(max_length=128, verbose_name='Name')),
('address', models.CharField(db_index=True, max_length=767, verbose_name='Address')),
@ -46,7 +57,8 @@ class Migration(migrations.Migration):
('match_asset', 'Can match asset'), ('change_assetnodes', 'Can change asset nodes')],
},
bases=(
assets.models.asset.common.NodesRelationMixin, assets.models.asset.common.JSONFilterMixin, models.Model),
assets.models.asset.common.NodesRelationMixin, assets.models.asset.common.JSONFilterMixin,
models.Model),
),
migrations.CreateModel(
name='AutomationExecution',

View File

@ -1,11 +1,11 @@
# Generated by Django 4.1.13 on 2024-05-09 03:16
import json
import assets.models.asset.common
from django.db.models import F, Q
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django.db.models import F
import assets.models.asset.common
class Migration(migrations.Migration):
@ -39,22 +39,26 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='automationexecution',
name='automation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='assets.baseautomation', verbose_name='Automation task'),
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions',
to='assets.baseautomation', verbose_name='Automation task'),
),
migrations.AddField(
model_name='asset',
name='domain',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assets', to='assets.domain', verbose_name='Zone'),
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
related_name='assets', to='assets.domain', verbose_name='Zone'),
),
migrations.AddField(
model_name='asset',
name='nodes',
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', to='assets.node', verbose_name='Nodes'),
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets',
to='assets.node', verbose_name='Nodes'),
),
migrations.AddField(
model_name='asset',
name='platform',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets', to='assets.platform', verbose_name='Platform'),
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets',
to='assets.platform', verbose_name='Platform'),
),
migrations.CreateModel(
name='AssetBaseAutomation',
@ -71,7 +75,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='GatherFactsAutomation',
fields=[
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
('baseautomation_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
],
options={
'verbose_name': 'Gather asset facts',
@ -81,7 +87,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PingAutomation',
fields=[
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
('baseautomation_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
],
options={
'verbose_name': 'Ping asset',

View File

@ -51,7 +51,7 @@ class Migration(migrations.Migration):
field=models.ManyToManyField(
related_name="assets",
to="assets.directoryservice",
verbose_name="Directory services",
),
verbose_name="Directory service",
)
),
]

View File

@ -0,0 +1,26 @@
# Generated by Django 4.1.13 on 2025-04-18 08:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("assets", "0017_auto_20250407_1124"),
]
operations = [
migrations.RenameField(
model_name="platform",
old_name="domain_enabled",
new_name="gateway_enabled",
),
migrations.RenameModel(
old_name="Domain",
new_name="Zone",
),
migrations.RenameField(
model_name="asset",
old_name="domain",
new_name="zone",
),
]

View File

@ -0,0 +1,29 @@
# Generated by Django 4.1.13 on 2025-05-06 10:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('assets', '0018_rename_domain_zone'),
]
operations = [
migrations.AlterField(
model_name='asset',
name='connectivity',
field=models.CharField(
choices=[
('-', 'Unknown'),
('na', 'N/A'),
('ok', 'OK'),
('err', 'Error'),
('rdp_err', 'RDP error'),
('auth_err', 'Authentication error'),
('password_err', 'Invalid password error'),
('openssh_key_err', 'OpenSSH key error'),
('ntlm_err', 'NTLM credentials rejected error'),
('create_temp_err', 'Create temporary error')
], default='-', max_length=16, verbose_name='Connectivity'),
),
]

View File

@ -1,9 +1,10 @@
# noqa
from .base import *
from .platform import *
from .asset import *
from .label import Label
from .gateway import *
from .domain import *
from .zone import * # noqa
from .node import *
from .favorite_asset import *
from .automations import *

View File

@ -168,8 +168,8 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
platform = models.ForeignKey(
Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets'
)
domain = models.ForeignKey(
"assets.Domain", null=True, blank=True, related_name='assets',
zone = models.ForeignKey(
"assets.Zone", null=True, blank=True, related_name='assets',
verbose_name=_("Zone"), on_delete=models.SET_NULL
)
nodes = models.ManyToManyField(
@ -244,7 +244,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
platform = self.platform
auto_config = {
'su_enabled': platform.su_enabled,
'domain_enabled': platform.domain_enabled,
'gateway_enabled': platform.gateway_enabled,
'ansible_enabled': False
}
automation = getattr(self.platform, 'automation', None)
@ -362,11 +362,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
@lazyproperty
def gateway(self):
if not self.domain_id:
if not self.zone_id:
return
if not self.platform.domain_enabled:
if not self.platform.gateway_enabled:
return
return self.domain.select_gateway()
return self.zone.select_gateway()
def as_node(self):
from assets.models import Node

View File

@ -23,6 +23,27 @@ class AbsConnectivity(models.Model):
self.date_verified = timezone.now()
self.save(update_fields=['connectivity', 'date_verified'])
@staticmethod
def get_err_connectivity(msg=None):
msg = (msg or '').strip().lower()
error_map = {
'rdp connection failed': Connectivity.RDP_ERR,
'expected openssh key': Connectivity.OPENSSH_KEY_ERR,
'invalid/incorrect password': Connectivity.PASSWORD_ERR,
'failed to create temporary': Connectivity.CREATE_TEMPORARY_ERR,
'ntlm: the specified credentials were rejected by the server': Connectivity.NTLM_ERR,
'permission denied': Connectivity.AUTH_ERR,
'authentication failed': Connectivity.AUTH_ERR,
'authentication failure': Connectivity.AUTH_ERR,
}
for key, value in error_map.items():
if key in msg:
return value
return Connectivity.ERR
@property
def is_connective(self):
if self.connectivity == Connectivity.OK:
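A usage sketch of the new error mapping (import paths are assumptions; the method is the one added above):

from assets.const import Connectivity            # assumed path
from assets.models.base import AbsConnectivity   # assumed path

assert AbsConnectivity.get_err_connectivity('Invalid/incorrect password') == Connectivity.PASSWORD_ERR
assert AbsConnectivity.get_err_connectivity('Permission denied (publickey).') == Connectivity.AUTH_ERR
assert AbsConnectivity.get_err_connectivity('RDP connection failed') == Connectivity.RDP_ERR
assert AbsConnectivity.get_err_connectivity('something unexpected') == Connectivity.ERR  # fallback
assert AbsConnectivity.get_err_connectivity(None) == Connectivity.ERR                    # msg may be None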

View File

@ -101,7 +101,7 @@ class Platform(LabeledMixin, JMSBaseModel):
default=CharsetChoices.utf8, choices=CharsetChoices.choices,
max_length=8, verbose_name=_("Charset")
)
domain_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
gateway_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
ds_enabled = models.BooleanField(default=False, verbose_name=_("DS enabled"))
# Account related
su_enabled = models.BooleanField(default=False, verbose_name=_("Su enabled"))

View File

@ -12,10 +12,10 @@ from .gateway import Gateway
logger = get_logger(__file__)
__all__ = ['Domain']
__all__ = ['Zone']
class Domain(LabeledMixin, JMSOrgBaseModel):
class Zone(LabeledMixin, JMSOrgBaseModel):
name = models.CharField(max_length=128, verbose_name=_('Name'))
class Meta:
@ -39,7 +39,7 @@ class Domain(LabeledMixin, JMSOrgBaseModel):
if not gateways:
gateways = self.active_gateways
if not gateways:
logger.warn(f'Not active gateway, domain={self}, pass')
logger.warning(f'Not active gateway, domain={self}, pass')
return None
return random.choice(gateways)
@ -49,7 +49,7 @@ class Domain(LabeledMixin, JMSOrgBaseModel):
@property
def gateways(self):
queryset = self.get_gateway_queryset().filter(domain=self)
queryset = self.get_gateway_queryset().filter(zone=self)
return queryset
@classmethod

View File

@ -32,7 +32,7 @@ class AssetPaginationBase(LimitOffsetPagination):
}
for k, v in self._request.query_params.items():
if k not in exclude_query_params and v is not None:
logger.warn(f'Not hit node.assets_amount because find a unknown query_param '
logger.warning(f'Not hit node.assets_amount because find a unknown query_param '
f'`{k}` -> {self._request.get_full_path()}')
return super().get_count(queryset)
node_assets_count = self.get_count_from_nodes(queryset)

View File

@ -154,7 +154,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
class Meta:
model = Asset
fields_fk = ['domain', 'platform']
fields_fk = ['zone', 'platform']
fields_mini = ['id', 'name', 'address'] + fields_fk
fields_small = fields_mini + ['is_active', 'comment']
fields_m2m = [
@ -233,7 +233,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
@classmethod
def setup_eager_loading(cls, queryset):
""" Perform necessary eager loading of data. """
queryset = queryset.prefetch_related('domain', 'nodes', 'protocols', 'directory_services') \
queryset = queryset.prefetch_related('zone', 'nodes', 'protocols', 'directory_services') \
.prefetch_related('platform', 'platform__automation') \
.annotate(category=F("platform__category")) \
.annotate(type=F("platform__type")) \
@ -271,9 +271,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
raise serializers.ValidationError({'platform': _("Platform not exist")})
return platform
def validate_domain(self, value):
def validate_zone(self, value):
platform = self._asset_platform
if platform.domain_enabled:
if platform.gateway_enabled:
return value
else:
return None

View File

@ -6,7 +6,7 @@ class HostGatheredInfoSerializer(serializers.Serializer):
vendor = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('Vendor'))
model = serializers.CharField(max_length=54, required=False, allow_blank=True, label=_('Model'))
sn = serializers.CharField(max_length=128, required=False, allow_blank=True, label=_('Serial number'))
cpu_model = serializers.CharField(max_length=64, allow_blank=True, required=False, label=_('CPU model'))
cpu_model = serializers.CharField(allow_blank=True, required=False, label=_('CPU model'))
cpu_count = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU count'))
cpu_cores = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU cores'))
cpu_vcpus = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU vcpus'))
@ -17,6 +17,8 @@ class HostGatheredInfoSerializer(serializers.Serializer):
distribution_version = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS version'))
arch = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS arch'))
gpu_model = serializers.CharField(allow_blank=True, required=False, label=_('GPU model'))
category_gathered_serializer_map = {
'host': HostGatheredInfoSerializer,

View File

@ -8,12 +8,12 @@ from common.serializers import ResourceLabelsMixin
from common.serializers.fields import ObjectRelatedField
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .gateway import GatewayWithAccountSecretSerializer
from ..models import Domain, Gateway
from ..models import Zone, Gateway
__all__ = ['DomainSerializer', 'DomainWithGatewaySerializer', 'DomainListSerializer']
__all__ = ['ZoneSerializer', 'ZoneWithGatewaySerializer', 'ZoneListSerializer']
class DomainSerializer(ResourceLabelsMixin, BulkOrgResourceModelSerializer):
class ZoneSerializer(ResourceLabelsMixin, BulkOrgResourceModelSerializer):
gateways = ObjectRelatedField(
many=True, required=False, label=_('Gateway'), queryset=Gateway.objects,
help_text=_(
@ -23,7 +23,7 @@ class DomainSerializer(ResourceLabelsMixin, BulkOrgResourceModelSerializer):
assets_amount = serializers.IntegerField(label=_('Assets amount'), read_only=True)
class Meta:
model = Domain
model = Zone
fields_mini = ['id', 'name']
fields_small = fields_mini + ['comment']
fields_m2m = ['assets', 'gateways', 'labels', 'assets_amount']
@ -55,9 +55,9 @@ class DomainSerializer(ResourceLabelsMixin, BulkOrgResourceModelSerializer):
return super().update(instance, validated_data)
class DomainListSerializer(DomainSerializer):
class Meta(DomainSerializer.Meta):
fields = list(set(DomainSerializer.Meta.fields + ['assets_amount']) - {'assets'})
class ZoneListSerializer(ZoneSerializer):
class Meta(ZoneSerializer.Meta):
fields = list(set(ZoneSerializer.Meta.fields + ['assets_amount']) - {'assets'})
@classmethod
def setup_eager_loading(cls, queryset):
@ -67,9 +67,9 @@ class DomainListSerializer(DomainSerializer):
return queryset
class DomainWithGatewaySerializer(serializers.ModelSerializer):
class ZoneWithGatewaySerializer(serializers.ModelSerializer):
gateways = GatewayWithAccountSecretSerializer(many=True, read_only=True)
class Meta:
model = Domain
model = Zone
fields = '__all__'

View File

@ -194,7 +194,7 @@ class PlatformSerializer(ResourceLabelsMixin, CommonSerializerMixin, WritableNes
]
fields_m2m = ['assets', 'assets_amount']
fields = fields_small + fields_m2m + [
"protocols", "domain_enabled", "su_enabled", "su_method",
"protocols", "gateway_enabled", "su_enabled", "su_method",
"ds_enabled", "automation", "comment", "custom_fields", "labels"
] + read_only_fields
extra_kwargs = {
@ -205,11 +205,11 @@ class PlatformSerializer(ResourceLabelsMixin, CommonSerializerMixin, WritableNes
"similar to logging in with a regular account and then switching to root"
)
},
"domain_enabled": {
"gateway_enabled": {
"label": _('Gateway enabled'),
"help_text": _("Assets can be connected using a zone gateway")
},
"domain_default": {"label": _('Default Domain')},
"zone_default": {"label": _('Default zone')},
'assets': {'required': False, 'label': _('Assets')},
}
@ -222,7 +222,7 @@ class PlatformSerializer(ResourceLabelsMixin, CommonSerializerMixin, WritableNes
return
name = self.initial_data.get('name')
if ' ' in name:
if name is not None and ' ' in name:
self.initial_data['name'] = name.replace(' ', '-')
if self.instance:
@ -262,8 +262,8 @@ class PlatformSerializer(ResourceLabelsMixin, CommonSerializerMixin, WritableNes
def validate_su_enabled(self, su_enabled):
return su_enabled and self.constraints.get('su_enabled', False)
def validate_domain_enabled(self, domain_enabled):
return domain_enabled and self.constraints.get('domain_enabled', False)
def validate_gateway_enabled(self, gateway_enabled):
return gateway_enabled and self.constraints.get('gateway_enabled', False)
def validate_automation(self, automation):
automation = automation or {}

View File

@ -25,10 +25,10 @@ def check_asset_can_run_ansible(asset):
def check_system_user_can_run_ansible(system_user):
if not system_user.auto_push_account:
logger.warn(f'Push system user task skip, auto push not enable: system_user={system_user.name}')
logger.warning(f'Push system user task skip, auto push not enable: system_user={system_user.name}')
return False
if not system_user.is_protocol_support_push:
logger.warn(f'Push system user task skip, protocol not support: '
logger.warning(f'Push system user task skip, protocol not support: '
f'system_user={system_user.name} protocol={system_user.protocol} '
f'support_protocol={system_user.SUPPORT_PUSH_PROTOCOLS}')
return False

View File

@ -20,7 +20,7 @@ router.register(r'directories', api.DSViewSet, 'ds')
router.register(r'customs', api.CustomViewSet, 'custom')
router.register(r'platforms', api.AssetPlatformViewSet, 'platform')
router.register(r'nodes', api.NodeViewSet, 'node')
router.register(r'domains', api.DomainViewSet, 'domain')
router.register(r'zones', api.ZoneViewSet, 'zone')
router.register(r'gateways', api.GatewayViewSet, 'gateway')
router.register(r'favorite-assets', api.FavoriteAssetViewSet, 'favorite-asset')
router.register(r'protocol-settings', api.PlatformProtocolViewSet, 'protocol-setting')

View File

@ -0,0 +1,32 @@
# Generated by Django 4.1.13 on 2025-04-21 06:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('audits', '0005_rename_serviceaccesslog'),
]
operations = [
migrations.AlterField(
model_name='ftplog',
name='account',
field=models.CharField(db_index=True, max_length=128, verbose_name='Account'),
),
migrations.AlterField(
model_name='ftplog',
name='asset',
field=models.CharField(db_index=True, max_length=1024, verbose_name='Asset'),
),
migrations.AlterField(
model_name='ftplog',
name='date_start',
field=models.DateTimeField(auto_now_add=True, verbose_name='Date start'),
),
migrations.AddIndex(
model_name='ftplog',
index=models.Index(fields=['date_start', 'org_id'], name='idx_date_start_org'),
),
]

View File

@ -56,19 +56,22 @@ class FTPLog(OrgModelMixin):
remote_addr = models.CharField(
max_length=128, verbose_name=_("Remote addr"), blank=True, null=True
)
asset = models.CharField(max_length=1024, verbose_name=_("Asset"))
account = models.CharField(max_length=128, verbose_name=_("Account"))
asset = models.CharField(max_length=1024, verbose_name=_("Asset"), db_index=True)
account = models.CharField(max_length=128, verbose_name=_("Account"), db_index=True)
operate = models.CharField(
max_length=16, verbose_name=_("Operate"), choices=OperateChoices.choices
)
filename = models.CharField(max_length=1024, verbose_name=_("Filename"))
is_success = models.BooleanField(default=True, verbose_name=_("Success"))
date_start = models.DateTimeField(auto_now_add=True, verbose_name=_("Date start"), db_index=True)
date_start = models.DateTimeField(auto_now_add=True, verbose_name=_("Date start"))
has_file = models.BooleanField(default=False, verbose_name=_("Can Download"))
session = models.CharField(max_length=36, verbose_name=_("Session"), default=uuid.uuid4)
class Meta:
verbose_name = _("File transfer log")
indexes = [
models.Index(fields=['date_start', 'org_id'], name='idx_date_start_org'),
]
@property
def filepath(self):

View File

@ -183,11 +183,11 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
'ConnectionToken', 'SessionJoinRecord',
'HistoricalJob', 'Status', 'TicketStep', 'Ticket',
'UserAssetGrantedTreeNodeRelation', 'TicketAssignee',
'SuperTicket', 'SuperConnectionToken', 'PermNode',
'SuperTicket', 'SuperConnectionToken', 'AdminConnectionToken', 'PermNode',
'PermedAsset', 'PermedAccount', 'MenuPermission',
'Permission', 'TicketSession', 'ApplyLoginTicket',
'ApplyCommandTicket', 'ApplyLoginAssetTicket',
'FavoriteAsset', 'ChangeSecretRecord', 'AppProvider', 'Variable'
'FavoriteAsset', 'ChangeSecretRecord', 'AppProvider', 'Variable', 'LeakPasswords'
}
include_models = {'UserSession'}
for i, app in enumerate(apps.get_models(), 1):

View File

@ -96,17 +96,20 @@ def batch_delete(queryset, batch_size=3000):
def remove_files_by_days(root_path, days, file_types=None):
if file_types is None:
file_types = ['.json', '.tar', '.gz', '.mp4']
need_rm_files = []
expire_date = timezone.now() - timezone.timedelta(days=days)
timestamp = expire_date.timestamp()
for root, dirs, files in os.walk(root_path):
rm_files = []
for file in files:
if any(file.endswith(file_type) for file_type in file_types):
file_path = os.path.join(root, file)
if os.path.getmtime(file_path) <= timestamp:
need_rm_files.append(file_path)
for file in need_rm_files:
os.remove(file)
rm_files.append(file_path)
for file in rm_files:
try:
os.remove(file)
except Exception as e:
logger.error(f"Remove file {file} error: {e}")
def clean_expired_session_period():

View File

@ -1,9 +1,12 @@
import time
from django.conf import settings
from django.http import JsonResponse
from django.shortcuts import render
from django.utils.translation import gettext as _
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.response import Response
from authentication.mixins import AuthMixin
from common.api import JMSModelViewSet
@ -44,6 +47,9 @@ class PasskeyViewSet(AuthMixin, FlashMessageMixin, JMSModelViewSet):
@action(methods=['get'], detail=False, url_path='login', permission_classes=[AllowAny])
def login(self, request):
confirm_mfa = request.GET.get('mfa')
if confirm_mfa:
request.session['passkey_confirm_mfa'] = '1'
return render(request, 'authentication/passkey.html', {})
def redirect_to_error(self, error):
@ -64,8 +70,16 @@ class PasskeyViewSet(AuthMixin, FlashMessageMixin, JMSModelViewSet):
if not user:
return self.redirect_to_error(_('Auth failed'))
confirm_mfa = request.session.get('passkey_confirm_mfa')
if confirm_mfa:
request.session['CONFIRM_LEVEL'] = ConfirmType.values.index('mfa') + 1
request.session['CONFIRM_TIME'] = int(time.time())
request.session['passkey_confirm_mfa'] = ''
return Response('ok')
try:
self.check_oauth2_auth(user, settings.AUTH_BACKEND_PASSKEY)
self.mark_mfa_ok('passkey', user)
return self.redirect_to_guard_view()
except Exception as e:
msg = getattr(e, 'msg', '') or str(e)

View File

@ -34,6 +34,7 @@ class MFAType(TextChoices):
Email = 'email', _('Email')
Face = 'face', _('Face Recognition')
Radius = 'otp_radius', _('Radius')
Passkey = 'passkey', _('Passkey')
Custom = 'mfa_custom', _('Custom')

View File

@ -7,6 +7,7 @@ from django.utils.translation import gettext_lazy as _
class BaseMFA(abc.ABC):
placeholder = _('Please input security code')
skip_cache_check = False
has_code = True
def __init__(self, user):
"""

View File

@ -11,6 +11,7 @@ class MFAFace(BaseMFA, AuthFaceMixin):
display_name = MFAType.Face.name
placeholder = 'Face Recognition'
skip_cache_check = True
has_code = False
def _check_code(self, code):
assert self.is_authenticated()

View File

@ -49,4 +49,3 @@ class MFAOtp(BaseMFA):
def help_text_of_disable(self):
return ''

View File

@ -0,0 +1,46 @@
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from authentication.mfa.base import BaseMFA
from ..const import MFAType
class MFAPasskey(BaseMFA):
name = MFAType.Passkey.value
display_name = MFAType.Passkey.name
placeholder = 'Passkey'
has_code = False
def _check_code(self, code):
assert self.is_authenticated()
return False, ''
def is_active(self):
if not self.is_authenticated():
return True
return self.user.passkey_set.count()
@staticmethod
def global_enabled():
return settings.AUTH_PASSKEY
def get_enable_url(self) -> str:
return '/ui/#/profile/passkeys'
def get_disable_url(self) -> str:
return '/ui/#/profile/passkeys'
def disable(self):
pass
def can_disable(self) -> bool:
return False
@staticmethod
def help_text_of_enable():
return _("Using passkey as MFA")
@staticmethod
def help_text_of_disable():
return _("Using passkey as MFA")

View File

@ -174,7 +174,7 @@ class AuthPreCheckMixin:
is_block = LoginBlockUtil(username, ip).is_block()
if not is_block:
return
logger.warn('Ip was blocked' + ': ' + username + ':' + ip)
logger.warning('Ip was blocked' + ': ' + username + ':' + ip)
exception = errors.BlockLoginError(username=username, ip=ip)
if raise_exception:
raise errors.BlockLoginError(username=username, ip=ip)
@ -253,7 +253,7 @@ class MFAMixin:
blocked = MFABlockUtils(username, ip).is_block()
if not blocked:
return
logger.warn('Ip was blocked' + ': ' + username + ':' + ip)
logger.warning('Ip was blocked' + ': ' + username + ':' + ip)
exception = errors.BlockMFAError(username=username, request=self.request, ip=ip)
if raise_exception:
raise exception
@ -323,7 +323,7 @@ class AuthPostCheckMixin:
def _check_passwd_is_too_simple(cls, user: User, password):
if not user.is_auth_backend_model():
return
if user.check_passwd_too_simple(password):
if user.check_passwd_too_simple(password) or user.check_leak_password(password):
message = _('Your password is too simple, please change it for security')
url = cls.generate_reset_password_url_with_flash_msg(user, message=message)
raise errors.PasswordTooSimple(url)

View File

@ -251,7 +251,7 @@ class ConnectionToken(JMSOrgBaseModel):
raise JMSException({'error': 'No host account available, please check the applet, host and account'})
host, account, lock_key = bulk_get(host_account, ('host', 'account', 'lock_key'))
gateway = host.domain.select_gateway() if host.domain else None
gateway = host.zone.select_gateway() if host.zone else None
platform = host.platform
data = {
@ -305,17 +305,17 @@ class ConnectionToken(JMSOrgBaseModel):
return account
@lazyproperty
def domain(self):
if not self.asset.platform.domain_enabled:
def zone(self):
if not self.asset.platform.gateway_enabled:
return
if self.asset.platform.name == GATEWAY_NAME:
return
domain = self.asset.domain if self.asset.domain else None
return domain
zone = self.asset.zone if self.asset.zone else None
return zone
@lazyproperty
def gateway(self):
if not self.asset or not self.domain:
if not self.asset or not self.zone:
return
return self.asset.gateway

View File

@ -4,7 +4,7 @@ from rest_framework import serializers
from accounts.const import SecretType
from accounts.models import Account
from acls.models import CommandGroup, CommandFilterACL
from assets.models import Asset, Platform, Gateway, Domain
from assets.models import Asset, Platform, Gateway, Zone
from assets.serializers.asset import AssetProtocolsSerializer
from assets.serializers.platform import PlatformSerializer
from common.serializers.fields import LabeledChoiceField
@ -135,7 +135,7 @@ class ConnectionTokenSecretSerializer(OrgResourceModelSerializerMixin):
account = _ConnectionTokenAccountSerializer(read_only=True, source='account_object')
gateway = _ConnectionTokenGatewaySerializer(read_only=True)
platform = _ConnectionTokenPlatformSerializer(read_only=True)
domain = ObjectRelatedField(queryset=Domain.objects, required=False, label=_('Domain'))
zone = ObjectRelatedField(queryset=Zone.objects, required=False, label=_('Domain'))
command_filter_acls = _ConnectionTokenCommandFilterACLSerializer(read_only=True, many=True)
expire_now = serializers.BooleanField(label=_('Expired now'), write_only=True, default=True)
connect_method = _ConnectTokenConnectMethodSerializer(read_only=True, source='connect_method_object')
@ -148,7 +148,7 @@ class ConnectionTokenSecretSerializer(OrgResourceModelSerializerMixin):
fields = [
'id', 'value', 'user', 'asset', 'account',
'platform', 'command_filter_acls', 'protocol',
'domain', 'gateway', 'actions', 'expire_at',
'zone', 'gateway', 'actions', 'expire_at',
'from_ticket', 'expire_now', 'connect_method',
'connect_options', 'face_monitor_token'
]

View File

@ -330,7 +330,7 @@
</h2>
<ul class=" nav navbar-top-links navbar-right">
<li class="dropdown">
<a class="dropdown-toggle login-page-language" data-toggle="dropdown" href="#" target="_blank">
<a class="dropdown-toggle login-page-language" data-bs-toggle="dropdown" href="#" target="_blank">
<i class="fa fa-globe fa-lg" style="margin-right: 2px"></i>
<span>{{ current_lang.title }}<b class="caret"></b></span>
</a>

View File

@ -5,12 +5,13 @@
<head>
<meta charset="UTF-8">
<title>Login passkey</title>
<script src="{% static "js/jquery-3.6.1.min.js" %}?_=9"></script>
<script src="{% static 'js/jquery-3.6.1.min.js' %}?_=9"></script>
</head>
<body>
<form action='{% url 'api-auth:passkey-auth' %}' method="post" id="loginForm">
<input type="hidden" name="passkeys" id="passkeys"/>
</form>
<form action="{% url 'api-auth:passkey-auth' %}" method="post" id="loginForm">
{% csrf_token %}
<input type="hidden" name="passkeys" id="passkeys"/>
</form>
</body>
<script>
const loginUrl = "/core/auth/login/";

View File

@ -1,16 +1,19 @@
# -*- coding: utf-8 -*-
#
import ipaddress
from datetime import datetime, timedelta
from urllib.parse import urljoin, urlparse
from django.conf import settings
from django.shortcuts import reverse
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from audits.const import DEFAULT_CITY
from users.models import User
from audits.models import UserLoginLog
from common.utils import get_ip_city, get_request_ip
from common.utils import get_logger, get_object_or_none
from common.utils import validate_ip, get_ip_city, get_request_ip
from common.utils import static_or_direct
from users.models import User
from .notifications import DifferentCityLoginMessage
logger = get_logger(__file__)
@ -33,8 +36,13 @@ def check_different_city_login_if_need(user, request):
return
city = get_ip_city(ip)
last_city = get_ip_city(last_user_login.ip)
if city == last_city:
last_cities = UserLoginLog.objects.filter(
datetime__gte=datetime.now() - timedelta(days=7),
username__in=usernames,
status=True
).exclude(city__in=city_white).values_list('city', flat=True).distinct()
if city in last_cities:
return
DifferentCityLoginMessage(user, ip, city).publish_async()
@ -70,3 +78,72 @@ def check_user_property_is_correct(username, **properties):
user = None
break
return user
def get_auth_methods():
return [
{
'name': 'OpenID',
'enabled': settings.AUTH_OPENID,
'url': reverse('authentication:openid:login'),
'logo': static('img/login_oidc_logo.png'),
'auto_redirect': True  # whether auto-redirect is supported
},
{
'name': 'CAS',
'enabled': settings.AUTH_CAS,
'url': reverse('authentication:cas:cas-login'),
'logo': static('img/login_cas_logo.png'),
'auto_redirect': True
},
{
'name': 'SAML2',
'enabled': settings.AUTH_SAML2,
'url': reverse('authentication:saml2:saml2-login'),
'logo': static('img/login_saml2_logo.png'),
'auto_redirect': True
},
{
'name': settings.AUTH_OAUTH2_PROVIDER,
'enabled': settings.AUTH_OAUTH2,
'url': reverse('authentication:oauth2:login'),
'logo': static_or_direct(settings.AUTH_OAUTH2_LOGO_PATH),
'auto_redirect': True
},
{
'name': _('WeCom'),
'enabled': settings.AUTH_WECOM,
'url': reverse('authentication:wecom-qr-login'),
'logo': static('img/login_wecom_logo.png'),
},
{
'name': _('DingTalk'),
'enabled': settings.AUTH_DINGTALK,
'url': reverse('authentication:dingtalk-qr-login'),
'logo': static('img/login_dingtalk_logo.png')
},
{
'name': _('FeiShu'),
'enabled': settings.AUTH_FEISHU,
'url': reverse('authentication:feishu-qr-login'),
'logo': static('img/login_feishu_logo.png')
},
{
'name': 'Lark',
'enabled': settings.AUTH_LARK,
'url': reverse('authentication:lark-qr-login'),
'logo': static('img/login_lark_logo.png')
},
{
'name': _('Slack'),
'enabled': settings.AUTH_SLACK,
'url': reverse('authentication:slack-qr-login'),
'logo': static('img/login_slack_logo.png')
},
{
'name': _("Passkey"),
'enabled': settings.AUTH_PASSKEY,
'url': reverse('api-auth:passkey-login'),
'logo': static('img/login_passkey.png')
}
]

View File

@ -14,7 +14,6 @@ from django.contrib.auth import login as auth_login, logout as auth_logout
from django.db import IntegrityError
from django.http import HttpRequest
from django.shortcuts import reverse, redirect
from django.templatetags.static import static
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.translation import gettext as _, get_language
@ -25,13 +24,14 @@ from django.views.generic.base import TemplateView, RedirectView
from django.views.generic.edit import FormView
from common.const import Language
from common.utils import FlashMessageUtil, static_or_direct, safe_next_url
from common.utils import FlashMessageUtil, safe_next_url
from users.utils import (
redirect_user_first_login_or_index
)
from .. import mixins, errors
from ..const import RSA_PRIVATE_KEY, RSA_PUBLIC_KEY
from ..forms import get_user_login_form_cls
from ..utils import get_auth_methods
__all__ = [
'UserLoginView', 'UserLogoutView',
@ -46,73 +46,17 @@ class UserLoginContextMixin:
def get_support_auth_methods(self):
query_string = self.request.GET.urlencode()
auth_methods = [
{
'name': 'OpenID',
'enabled': settings.AUTH_OPENID,
'url': f"{reverse('authentication:openid:login')}?{query_string}",
'logo': static('img/login_oidc_logo.png'),
'auto_redirect': True  # whether auto-redirect is supported
},
{
'name': 'CAS',
'enabled': settings.AUTH_CAS,
'url': f"{reverse('authentication:cas:cas-login')}?{query_string}",
'logo': static('img/login_cas_logo.png'),
'auto_redirect': True
},
{
'name': 'SAML2',
'enabled': settings.AUTH_SAML2,
'url': f"{reverse('authentication:saml2:saml2-login')}?{query_string}",
'logo': static('img/login_saml2_logo.png'),
'auto_redirect': True
},
{
'name': settings.AUTH_OAUTH2_PROVIDER,
'enabled': settings.AUTH_OAUTH2,
'url': f"{reverse('authentication:oauth2:login')}?{query_string}",
'logo': static_or_direct(settings.AUTH_OAUTH2_LOGO_PATH),
'auto_redirect': True
},
{
'name': _('WeCom'),
'enabled': settings.AUTH_WECOM,
'url': f"{reverse('authentication:wecom-qr-login')}?{query_string}",
'logo': static('img/login_wecom_logo.png'),
},
{
'name': _('DingTalk'),
'enabled': settings.AUTH_DINGTALK,
'url': f"{reverse('authentication:dingtalk-qr-login')}?{query_string}",
'logo': static('img/login_dingtalk_logo.png')
},
{
'name': _('FeiShu'),
'enabled': settings.AUTH_FEISHU,
'url': f"{reverse('authentication:feishu-qr-login')}?{query_string}",
'logo': static('img/login_feishu_logo.png')
},
{
'name': 'Lark',
'enabled': settings.AUTH_LARK,
'url': f"{reverse('authentication:lark-qr-login')}?{query_string}",
'logo': static('img/login_lark_logo.png')
},
{
'name': _('Slack'),
'enabled': settings.AUTH_SLACK,
'url': f"{reverse('authentication:slack-qr-login')}?{query_string}",
'logo': static('img/login_slack_logo.png')
},
{
'name': _("Passkey"),
'enabled': settings.AUTH_PASSKEY,
'url': f"{reverse('api-auth:passkey-login')}?{query_string}",
'logo': static('img/login_passkey.png')
}
]
return [method for method in auth_methods if method['enabled']]
all_methods = get_auth_methods()
methods = []
for method in all_methods:
method = method.copy()
if not method.get('enabled', False):
continue
url = method.get('url', '')
if query_string and url:
method['url'] = '{}?{}'.format(url, query_string)
methods.append(method)
return methods
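
The hard-coded provider list is replaced by a lookup through get_auth_methods() (imported from ..utils above); the view now only filters for enabled entries and appends the current query string. Below is a minimal, standalone sketch of that registry-plus-filter pattern. The entries, URLs and flags are illustrative assumptions, not the actual JumpServer configuration, which builds them from settings and reverse().

# Hypothetical sketch of the pattern used above; values are assumptions.
AUTH_METHODS = [
    {'name': 'OpenID', 'enabled': True, 'url': '/auth/openid/login/',
     'logo': '/static/img/login_oidc_logo.png', 'auto_redirect': True},
    {'name': 'CAS', 'enabled': False, 'url': '/auth/cas/login/',
     'logo': '/static/img/login_cas_logo.png', 'auto_redirect': True},
]


def get_auth_methods():
    # Central registry; the real helper derives this from settings.
    return AUTH_METHODS


def get_support_auth_methods(query_string=''):
    methods = []
    for method in get_auth_methods():
        method = method.copy()              # never mutate the shared registry
        if not method.get('enabled', False):
            continue
        url = method.get('url', '')
        if query_string and url:
            method['url'] = '{}?{}'.format(url, query_string)
        methods.append(method)
    return methods


print(get_support_auth_methods('next=/home'))
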
@staticmethod
def get_support_langs():

View File

@ -40,6 +40,8 @@ class UserLoginMFAView(mixins.AuthMixin, FormView):
if mfa_type == MFAType.Face:
return redirect(reverse('authentication:login-face-capture'))
elif mfa_type == MFAType.Passkey:
return redirect('/api/v1/authentication/passkeys/login/')
return self.do_mfa_check(form, code, mfa_type)
def do_mfa_check(self, form, code, mfa_type):

View File

@ -4,6 +4,7 @@ import os
import sys
from django.apps import AppConfig
from django.db import close_old_connections
class CommonConfig(AppConfig):
@ -21,3 +22,4 @@ class CommonConfig(AppConfig):
if not os.environ.get('DJANGO_DEBUG_SHELL'):
django_ready.send(CommonConfig)
close_old_connections()

View File

@ -111,7 +111,7 @@ class Cache(metaclass=CacheType):
value = self[field].to_internal_value(v.decode())
internal_data[field] = value
else:
logger.warn(f'Cache got invalid field: '
logger.warning(f'Cache got invalid field: '
f'key={self.key} '
f'invalid_field={field} '
f'valid_fields={self.field_names}')

View File

@ -3,7 +3,6 @@ import pycountry
from django.db import models
from django.utils.translation import gettext_lazy as _
from phonenumbers import PhoneMetadata
from common.utils import lazyproperty
ADMIN = 'Admin'
USER = 'User'
@ -77,6 +76,7 @@ class Language(models.TextChoices):
pt_br = 'pt-br', 'Português (Brasil)'
es = 'es', 'Español'
ru = 'ru', 'Русский'
ko = 'ko', '한국어'
@classmethod
def get_code_mapper(cls):

View File

@ -56,7 +56,14 @@ def close_old_connections():
@contextmanager
def safe_db_connection():
def safe_db_connection(auto_close=True):
close_old_connections()
yield
close_old_connections()
@contextmanager
def safe_atomic_db_connection(auto_close=False):
in_atomic_block = connection.in_atomic_block # are we currently inside a transaction?
autocommit = transaction.get_autocommit() # is autocommit enabled?
created = False
@ -69,8 +76,7 @@ def safe_db_connection():
yield
finally:
# If we are not inside a transaction (an API request may need to commit it), close the connection
if created and not in_atomic_block and autocommit:
print("close connection in safe_db_connection")
if auto_close or (created and not in_atomic_block and autocommit):
close_old_connections()
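
safe_db_connection() gains an auto_close flag and a new safe_atomic_db_connection() variant that only recycles the connection when it is safe to do so. The hunk above omits the middle of the function, so the following is a hedged reconstruction built on Django's public connection/transaction API: only the captured flags and the closing condition come from the diff, the reconnect step in the middle is an assumption.

# Hedged sketch of safe_atomic_db_connection(); the reconnect logic is assumed.
from contextlib import contextmanager

from django.db import connection, transaction


@contextmanager
def safe_atomic_db_connection(auto_close=False):
    in_atomic_block = connection.in_atomic_block   # already inside a transaction?
    autocommit = transaction.get_autocommit()      # autocommit currently enabled?
    created = False
    try:
        if connection.connection is None or not connection.is_usable():
            connection.close()      # assumed: drop a dead or stale connection
            connection.connect()    # and open a fresh one
            created = True
        yield
    finally:
        # Close only when forced, or when we created the connection outside of a
        # transaction with autocommit on (the condition shown in the diff).
        # The real helper calls close_old_connections(); plain close() stands in here.
        if auto_close or (created and not in_atomic_block and autocommit):
            connection.close()

The bulk_handle flush in the next hunk switches to this variant.
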

View File

@ -11,7 +11,7 @@ from functools import wraps
from django.db import transaction
from .db.utils import open_db_connection, safe_db_connection
from .db.utils import open_db_connection, safe_atomic_db_connection
from .utils import logger
@ -318,7 +318,7 @@ def bulk_handle(handler, batch_size=50, timeout=0.5):
if not cache:
return
with tmp_to_org(org_id):
with safe_db_connection():
with safe_atomic_db_connection():
handler(cache)
cache.clear()

View File

@ -34,10 +34,14 @@ class SimpleMetadataWithFilters(SimpleMetadata):
"""
actions = {}
view.raw_action = getattr(view, "action", None)
query_action = request.query_params.get("action", None)
for method in self.methods & set(view.allowed_methods):
if hasattr(view, "action_map"):
view.action = view.action_map.get(method.lower(), view.action)
if query_action and query_action.lower() != method.lower():
continue
view.request = clone_request(request, method)
try:
# Test global permissions
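
determine_actions() now honours an optional ?action=<method> query parameter, so an OPTIONS request can ask for the form metadata of a single HTTP method instead of all of them. A hedged sketch of that filtering follows, written against DRF's stock SimpleMetadata; the surrounding SimpleMetadataWithFilters class has additional behavior not shown here, and the exception handling is simplified.

# Hedged sketch: OPTIONS /resource/?action=post builds metadata for POST only.
from rest_framework.metadata import SimpleMetadata
from rest_framework.request import clone_request


class ActionFilteredMetadata(SimpleMetadata):
    methods = {'GET', 'POST', 'PUT', 'PATCH'}

    def determine_actions(self, request, view):
        actions = {}
        query_action = request.query_params.get('action')
        for method in self.methods & set(view.allowed_methods):
            if query_action and query_action.lower() != method.lower():
                continue                      # caller asked about a different method
            view.request = clone_request(request, method)
            try:
                view.check_permissions(view.request)
                serializer = view.get_serializer()
                actions[method] = self.get_serializer_info(serializer)
            except Exception:                 # the real class narrows this down
                pass
            finally:
                view.request = request
        return actions
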

View File

@ -14,6 +14,7 @@ from uuid import UUID
from django.utils.translation import gettext_lazy as _
from django.db.models import QuerySet as DJQuerySet
from django.db.models import Q
from elasticsearch7 import Elasticsearch
from elasticsearch7.helpers import bulk
from elasticsearch7.exceptions import RequestError, SSLError
@ -78,6 +79,10 @@ class ESClientV7(ESClientBase):
def get_sort(cls, field, direction):
return f'{field}:{direction}'
@classmethod
def get_sorts(cls, sorts: list):
return ','.join(sorts)
class ESClientV6(ESClientV7):
@ -99,6 +104,10 @@ class ESClientV8(ESClientBase):
def get_sort(cls, field, direction):
return {field: {'order': direction}}
@classmethod
def get_sorts(cls, sorts: list):
return sorts
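
get_sorts() papers over the different sort syntaxes of the two client generations: the 7.x client takes a comma-joined 'field:direction' string, while the 8.x client expects a list of per-field dicts. A small illustration (field names are arbitrary):

# Same ordering expressed in both dialects handled above.
sorts_v7 = ','.join(['_score:desc', 'datetime:desc'])
# -> '_score:desc,datetime:desc', the string form used for ES 7

sorts_v8 = [{'_score': {'order': 'desc'}}, {'datetime': {'order': 'desc'}}]
# -> list of dicts, the form used for ES 8
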
def get_es_client_version(**kwargs):
try:
@ -190,8 +199,7 @@ class ES(object):
mappings['aliases'] = {
self.query_index: {}
}
if self.es.indices.exists(index=self.index):
return
try:
self.es.indices.create(index=self.index, body=mappings)
except (RequestError, BadRequestError) as e:
@ -245,6 +253,7 @@ class ES(object):
}
if sort is not None:
search_params['sort'] = sort
logger.info('search_params: {}'.format(search_params))
data = self.es.search(**search_params)
source_data = []
@ -319,10 +328,12 @@ class ES(object):
kwargs = new_kwargs
index_in_field = 'id__in'
keyword_fields = self.keyword_fields
exact_fields = self.exact_fields
match_fields = self.match_fields
match = {}
search = []
exact = {}
index = {}
@ -330,11 +341,17 @@ class ES(object):
index['values'] = kwargs[index_in_field]
for k, v in kwargs.items():
if k in exact_fields:
exact[k] = v
if k in exact_fields.union(keyword_fields):
exact['{}.keyword'.format(k)] = v
elif k in match_fields:
match[k] = v
args = kwargs.get('search', [])
for item in args:
for k, v in item.items():
if k in match_fields:
search.append(item)
# handle the time field
time_field_name, time_range = self.handler_time_field(kwargs)
@ -363,10 +380,12 @@ class ES(object):
body = {
'query': {
'bool': {
'must': [
'must': [],
'should': should + [
{'match': {k: v}} for k, v in match.items()
] + [
{'match': item} for item in search
],
'should': should,
'filter': self.handle_exact_fields(exact) +
[
{
@ -403,6 +422,17 @@ class QuerySet(DJQuerySet):
_method_calls = {k: list(v) for k, v in groupby(self._method_calls, lambda x: x[0])}
return _method_calls
def _grouped_search_args(self, query):
conditions = {}
for q in query:
for c in q.children:
if isinstance(c, Q):
child = self._grouped_search_args(c)
[conditions.setdefault(k, []).extend(v) for k, v in child.items()]
else:
conditions.setdefault(c[0], []).append(c[1])
return conditions
@lazyproperty
def _filter_kwargs(self):
_method_calls = self._grouped_method_calls
@ -410,14 +440,14 @@ class QuerySet(DJQuerySet):
if not filter_calls:
return {}
names, multi_args, multi_kwargs = zip(*filter_calls)
args = {
key: value
for arg in multi_args if arg
for key, value in arg[0].children
}
# flatten the positional filter() args into a single tuple
multi_args = tuple(reduce(lambda x, y: x + y, (sub for sub in multi_args if sub),()))
args = self._grouped_search_args(multi_args)
striped_args = [{k.replace('__icontains', ''): v} for k, values in args.items() for v in values]
kwargs = reduce(lambda x, y: {**x, **y}, multi_kwargs, {})
kwargs.update(args)
striped_kwargs = {}
striped_kwargs = {'search': striped_args}
for k, v in kwargs.items():
k = k.replace('__exact', '')
k = k.replace('__startswith', '')
@ -428,6 +458,7 @@ class QuerySet(DJQuerySet):
@lazyproperty
def _sort(self):
order_by = self._grouped_method_calls.get('order_by')
_sorts = [self._storage.client.get_sort('_score', 'desc')]
if order_by:
for call in reversed(order_by):
fields = call[1]
@ -440,7 +471,10 @@ class QuerySet(DJQuerySet):
direction = 'asc'
field = field.lstrip('-+')
sort = self._storage.client.get_sort(field, direction)
return sort
_sorts.append(sort)
break
sorts = self._storage.client.get_sorts(_sorts)
return sorts
def __execute(self):
_filter_kwargs = self._filter_kwargs
@ -514,4 +548,4 @@ class QuerySet(DJQuerySet):
return iter(self.__execute())
def __len__(self):
return self.count()
return self.count()
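
_grouped_search_args() walks a tree of Django Q objects and groups every leaf (field, value) pair by field name, which is what lets chained __icontains lookups surface as individual match clauses in the should list of the bool query above. A standalone sketch of the same traversal, assuming only django.db.models.Q (the real method lives on the ES-backed QuerySet):

# Standalone sketch of the Q-flattening idea used by _grouped_search_args().
from django.db.models import Q


def grouped_search_args(q_objects):
    conditions = {}
    for q in q_objects:
        for child in q.children:
            if isinstance(child, Q):                    # nested Q node: recurse
                nested = grouped_search_args([child])
                for field, values in nested.items():
                    conditions.setdefault(field, []).extend(values)
            else:                                       # leaf: ('field__lookup', value)
                field, value = child
                conditions.setdefault(field, []).append(value)
    return conditions


query = Q(asset__icontains='web') | Q(asset__icontains='db')
print(grouped_search_args([query]))   # {'asset__icontains': ['web', 'db']}

Each grouped value then becomes its own {'match': {field: value}} clause in the query body built above.
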

View File

@ -155,7 +155,7 @@ class WeCom(RequestMixin):
errcode = data['errcode']
if errcode == ErrorCode.INVALID_CODE:
logger.warn(f'WeCom get_user_id_by_code invalid code: code={code}')
logger.warning(f'WeCom get_user_id_by_code invalid code: code={code}')
return None, None
self._requests.check_errcode_is_0(data)

View File

@ -51,6 +51,21 @@ def date_expired_default():
years = 70
return timezone.now() + timezone.timedelta(days=365 * years)
def user_date_expired_default():
try:
days = int(settings.USER_DEFAULT_EXPIRED_DAYS)
except TypeError:
days = 25550
return timezone.now() + timezone.timedelta(days=days)
def asset_permission_date_expired_default():
try:
days = int(settings.ASSET_PERMISSION_DEFAULT_EXPIRED_DAYS)
except TypeError:
days = 25550
return timezone.now() + timezone.timedelta(days=days)
def union_queryset(*args, base_queryset=None):
if len(args) == 1:

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:860b4d38beff81667c64da41c026a7dd28c3c93a28ae61fefaa7c26875f35638
size 73906864

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c5119fd8911a107a7112422ade326766fe3d9538ac15bca06e3c622191c84e18
size 61086554

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b82b874152c798dda407ffe7544e1f5ec67efa1f5c334efc0d3893b8053b4be1
size 3649897

View File

@ -8,13 +8,14 @@ from .const import RED, GREEN, RESET
class BaseTranslateManager:
bulk_size = 15
SEPARATOR = "<SEP>"
SEPARATOR = "<-SEP->"
LANG_MAPPER = {
'ja': 'Japanese',
'zh_Hant': 'Traditional Chinese',
'pt_BR': 'Portuguese (Brazil)',
'es': 'Spanish',
'ru': 'Russian',
'ko': 'Korean',
}
def __init__(self, dir_path, oai_trans_instance):
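
The batch translator joins several source strings with SEPARATOR into one prompt and splits the reply on the same token, presumably switching from '<SEP>' to the more distinctive '<-SEP->' so the marker is less likely to be swallowed or rewritten in model output; Korean is also added to the language map. A hedged sketch of that round trip, with the real OpenAI-backed translator replaced by a stub:

# Hedged sketch of the separator round trip; translate_fn stands in for the
# actual model call.
SEPARATOR = '<-SEP->'


def translate_batch(texts, translate_fn):
    joined = SEPARATOR.join(texts)              # one request per batch
    translated = translate_fn(joined)
    parts = translated.split(SEPARATOR)
    if len(parts) != len(texts):                # the model mangled a separator
        raise ValueError('separator count mismatch, retry with a smaller batch')
    return dict(zip(texts, parts))


print(translate_batch(['Cancel', 'Confirm'], lambda s: s.upper()))
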

85 apps/i18n/chen/ko.json Normal file
View File

@ -0,0 +1,85 @@
{
"ACLRejectError": "이 명령은 실행할 수 없습니다",
"AffectedRows": "영향을 받은 행 수",
"AlreadyFirstPageError": "이미 첫 페이지입니다.",
"AlreadyLastPageError": "이미 마지막 페이지입니다.",
"Cancel": "취소",
"ChangeContextError": "콘텍스트 전환 실패",
"CommandReview": "명령어 검토",
"CommandReviewMessage": "귀하가 입력한 명령은 검토 후 실행해야 합니다. 검토 요청을 하시겠습니까?",
"CommandReviewRejectBy": "명령 검토가 %s에 의해 거부되었습니다.",
"CommandReviewTimeoutError": "명령 재검토 시간 초과",
"CommandWarningDialogMessage": "귀하가 실행한 명령은 위험이 있습니다. 경고 알림이 관리자에게 전송됩니다. 계속하시겠습니까?",
"Confirm": "확인",
"ConnectError": "연결 실패",
"ConnectSuccess": "연결 성공",
"Connected": "연결됨",
"Copy": "복사",
"CopyFailed": "복사 실패",
"CopyNotAllowed": "복사가 허용되지 않습니다. 관리자에게 권한을 요청해 주십시오!",
"CopySucceeded": "복사 성공",
"Current": "현재",
"DatabaseExplorer": "데이터베이스 탐색기",
"DatabaseProperties": "데이터 원본 속성",
"DownloadNotAllowed": "다운로드가 허용되지 않습니다. 관리자에게 권한을 활성화해 달라고 연락하십시오!",
"DriverClass": "드라이버 클래스",
"DriverVersion": "드라이버 버전",
"ErrorMessage": "오류 메시지",
"ExecuteError": "실행 실패",
"ExecuteSuccess": "실행 성공",
"ExecutionCanceled": "실행이 취소되었습니다.",
"ExportALL": "모든 데이터 내보내기",
"ExportAll": "모두 내보내기",
"ExportCurrent": "현재 페이지 내보내기",
"ExportData": "데이터 내보내기",
"FetchError": "데이터를 가져오는 데 실패했습니다",
"Format": "형식",
"FormatHotKey": "형식화 (Ctrl + L)",
"InitializeDatasource": "데이터 소스 초기화",
"InitializeDatasourceFailed": "데이터 소스 초기화 실패",
"InitializingDatasourceMessage": "데이터 소스를 초기화하는 중입니다. 잠시만 기다려 주십시오...",
"InsertStatement": "삽입 문",
"JDBCURL": "JDBC URL",
"LogOutput": "로그 출력",
"Name": "이름",
"NewQuery": "새 쿼리 만들기",
"NoPermissionError": "이 작업을 수행할 권한이 없습니다.",
"NumRow": "{num} 행",
"Open": "열기",
"OverMaxIdleTimeError": "이 세션의 유휴 시간이 %d 분을 초과하여 종료되었습니다",
"OverMaxSessionTimeError": "이 세션의 시간이 %d 시간을 초과하여 종료되었습니다.",
"ParseError": "분석 실패",
"PasteNotAllowed": "붙여넣기가 허용되지 않습니다. 관리자에게 문의하여 권한을 요청하십시오!",
"PermissionAlreadyExpired": "권한이 만료되었습니다.",
"PermissionExpiredDialogMessage": "권한이 만료되었습니다. 세션은 10분 후 만료될 예정입니다. 관리자가 연장할 수 있도록 신속하게 연락해 주시기 바랍니다.",
"PermissionExpiredDialogTitle": "권한이 만료되었습니다.",
"PermissionsExpiredOn": "이 세션에 연결된 권한이 %s에 만료되었습니다",
"Properties": "속성",
"Refresh": "갱신",
"Run": "실행",
"RunHotKey": "실행하기 (Ctrl + Enter)",
"RunSelected": "선택 실행",
"Save": "저장",
"SaveSQL": "SQL 저장",
"SaveSucceed": "저장 성공",
"Scope": "범위",
"SelectSQL": "SQL 선택",
"SessionClosedBy": "세션이 %s에 의해 종료되었습니다",
"SessionFinished": "세션이 종료되었습니다",
"SessionLockedError": "현재 세션이 잠겨 있어 명령을 계속 실행할 수 없습니다",
"SessionLockedMessage": "현재 세션이 %s에 의해 잠겨 있어 명령어를 계속 실행할 수 없습니다",
"SessionUnlockedMessage": "이 세션은 %s에 의해 해제되었으며, 명령을 계속 실행할 수 있습니다",
"ShowProperties": "속성",
"StopHotKey": "중지 (Ctrl + C)",
"Submit": "제출",
"Total": "총계",
"Type": "형식",
"UpdateStatement": "업데이트 명령어",
"User": "사용자",
"UserCancelCommandReviewError": "사용자가 명령 승인을 취소했습니다",
"Version": "버전",
"ViewData": "데이터 보기",
"WaitCommandReviewMessage": "승인 요청이 발송되었습니다, 승인 결과를 기다려 주세요",
"Warning": "경고",
"initializingDatasourceFailedMessage": "연결 실패, 데이터베이스 연결 구성이 올바른지 확인해 주세요"
}

View File

@ -1,85 +1,85 @@
{
"ACLRejectError": "Эта команда не может быть выполнена.",
"AffectedRows": "Количество затронутых строк",
"AlreadyFirstPageError": "Это уже первая страница",
"ACLRejectError": "Выполнение команды запрещено правилом",
"AffectedRows": "Затронутые строки",
"AlreadyFirstPageError": "Это первая страница",
"AlreadyLastPageError": "Это последняя страница",
"Cancel": "Отменить",
"ChangeContextError": "Сменить контекст не удалось",
"CommandReview": "Команда проверки",
"CommandReviewMessage": "Команда, которую вы ввели, требует подтверждения перед выполнением, хотите ли вы начать процесс проверки?",
"ChangeContextError": "Ошибка смены контекста",
"CommandReview": "Проверка команды",
"CommandReviewMessage": "Введенная вами команда требует проверки перед выполнением, хотите отправить запрос на проверку?",
"CommandReviewRejectBy": "Команда проверки отклонена %s",
"CommandReviewTimeoutError": "Тайм-аут проверки команды",
"CommandWarningDialogMessage": "Выполняемая вами команда содержит риски, уведомление будет отправлено администратору. Продолжить?",
"CommandWarningDialogMessage": "Команда, которую вы хотите выполнить, содержит риски, уведомление будет отправлено администратору. Продолжить?",
"Confirm": "Подтвердить",
"ConnectError": "Ошибка соединения",
"ConnectSuccess": "Соединение успешно",
"ConnectError": "Ошибка подключения",
"ConnectSuccess": "Успешно подключено",
"Connected": "Подключено",
"Copy": "Копировать.",
"Copy": "Копировать",
"CopyFailed": "Копирование не удалось",
"CopyNotAllowed": "Копирование запрещено, пожалуйста, свяжитесь с администратором для получения прав!",
"CopySucceeded": "Копирование выполнено успешно",
"Current": "Текущая",
"DatabaseExplorer": "Обозреватель базы данных.",
"CopySucceeded": "Успешное копирование",
"Current": "Текущий",
"DatabaseExplorer": "Проводник баз данных",
"DatabaseProperties": "Свойства источника данных",
"DownloadNotAllowed": "Скачать нельзя, пожалуйста, свяжитесь с администратором для получения доступа!",
"DownloadNotAllowed": "Загрузка не разрешена, пожалуйста, свяжитесь с администратором для получения доступа!",
"DriverClass": "Класс драйвера",
"DriverVersion": "Версия драйвера",
"ErrorMessage": "Сообщение об ошибке",
"ExecuteError": "Выполнение не удалось",
"ExecuteSuccess": "Выполнено успешно",
"ExecutionCanceled": "Выполнение отменено.",
"ExecutionCanceled": "Выполнение отменено",
"ExportALL": "Экспортировать все данные",
"ExportAll": "Экспортировать все",
"ExportCurrent": "Экспорт текущей страницы",
"ExportData": "Экспорт данных.",
"ExportData": "Экспорт данных",
"FetchError": "Не удалось получить данные",
"Format": "Формат.",
"Format": "Формат",
"FormatHotKey": "Форматировать (Ctrl + L)",
"InitializeDatasource": "Инициализация источника данных",
"InitializeDatasourceFailed": "Не удалось инициализировать источник данных",
"InitializingDatasourceMessage": "Инициализация источника данных, пожалуйста, подождите...",
"InsertStatement": "Вставить оператор",
"InsertStatement": "Инструкция INSERT",
"JDBCURL": "JDBC URL",
"LogOutput": "Журнал вывода",
"Name": "Название",
"NewQuery": "Создать новый запрос",
"NoPermissionError": "Нет прав на выполнение данного действия",
"NewQuery": "Новый запрос",
"NoPermissionError": "Нет прав на выполнение этого действия",
"NumRow": "{num} строк",
"Open": "Открыть",
"OverMaxIdleTimeError": "Так как эта сессия простаивает более %d минут, она была закрыта.",
"OverMaxSessionTimeError": "Так как эта сессия превышает %d часов, она была закрыта.",
"OverMaxIdleTimeError": "Сессия была закрыта, так как время простоя превысило %d минут",
"OverMaxSessionTimeError": "Сессия была закрыта, так как продолжительность сессии превысила %d часов",
"ParseError": "Ошибка анализа",
"PasteNotAllowed": "Вставка не разрешена, пожалуйста, свяжитесь с администратором для получения прав!",
"PermissionAlreadyExpired": "Срок действия авторизации истек",
"PermissionExpiredDialogMessage": "Авторизация истекла, сессия истечёт через десять минут, пожалуйста, свяжитесь с администратором для продления",
"PermissionExpiredDialogTitle": "Авторизация истекла",
"PermissionsExpiredOn": "Права, связанные с этой сессией, истекли %s",
"PermissionAlreadyExpired": "Срок действия разрешения истек",
"PermissionExpiredDialogMessage": "Разрешение истекло, сессия истечет через 10 минут, пожалуйста, свяжитесь с администратором для продления",
"PermissionExpiredDialogTitle": "Разрешение истекло",
"PermissionsExpiredOn": "Разрешение для этой сессии истекло %s",
"Properties": "Свойства",
"Refresh": "Обновить",
"Run": "Запустить",
"RunHotKey": "Запуск (Ctrl + Enter).",
"RunSelected": "Запустить выделенное.",
"Run": "Запуск",
"RunHotKey": "Запуск (Ctrl + Enter)",
"RunSelected": "Запустить выбранные",
"Save": "Сохранить",
"SaveSQL": "Сохранить SQL.",
"SaveSucceed": "Сохранение успешно",
"Scope": "Диапазон",
"SaveSQL": "Сохранить SQL",
"SaveSucceed": "Успешно сохранено",
"Scope": "Область",
"SelectSQL": "Выбрать SQL",
"SessionClosedBy": "Сессия была закрыта %s",
"SessionFinished": "Сессия завершена",
"SessionLockedError": "Текущая сессия заблокирована, команда не может быть продолжена",
"SessionLockedMessage": "Эта сессия была заблокирована %s и команда не может быть продолжена",
"SessionLockedError": "Текущая сессия заблокирована, невозможно продолжить выполнение команды",
"SessionLockedMessage": " Эта сессия была заблокирована %s, невозможно продолжить выполнение команды",
"SessionUnlockedMessage": "Эта сессия была разблокирована %s, можно продолжить выполнение команды",
"ShowProperties": "Свойства",
"StopHotKey": "Остановить (Ctrl + C)",
"Submit": "Отправить",
"Total": "Всего:",
"Total": "Всего",
"Type": "Тип",
"UpdateStatement": "Обновить запрос",
"User": "Пользователь.",
"User": "Пользователь",
"UserCancelCommandReviewError": "Пользователь отменил проверку команды",
"Version": "Версия",
"ViewData": "Просмотреть данные",
"WaitCommandReviewMessage": "Запрос на пересмотр инициирован, пожалуйста, ожидайте результата пересмотра",
"ViewData": "Просмотр данных",
"WaitCommandReviewMessage": "Запрос на проверку отправлен, пожалуйста, ожидайте результатов проверки",
"Warning": "Предупреждение",
"initializingDatasourceFailedMessage": "Ошибка подключения, пожалуйста, проверьте правильность конфигурации подключения к базе данных."
"initializingDatasourceFailedMessage": "Ошибка подключения, пожалуйста, проверьте настройки подключения к базе данных"
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -19,85 +19,87 @@ msgstr ""
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
#: static/js/jumpserver.js:267
msgid "Update is successful!"
msgstr ""
msgstr "Actualización exitosa"
#: static/js/jumpserver.js:269
msgid "An unknown error occurred while updating.."
msgstr ""
msgstr "Se produjo un error desconocido durante la actualización"
#: static/js/jumpserver.js:342
msgid "Not found"
msgstr ""
msgstr "No encontrado"
#: static/js/jumpserver.js:344
msgid "Server error"
msgstr ""
msgstr "Error del servidor"
#: static/js/jumpserver.js:346 static/js/jumpserver.js:384
#: static/js/jumpserver.js:386
msgid "Error"
msgstr ""
msgstr "Error"
#: static/js/jumpserver.js:352 static/js/jumpserver.js:393
msgid "Delete the success"
msgstr ""
msgstr "Eliminado exitosamente"
#: static/js/jumpserver.js:359
msgid "Are you sure about deleting it?"
msgstr ""
msgstr "¿Estás seguro que deseas eliminar?"
#: static/js/jumpserver.js:363 static/js/jumpserver.js:404
msgid "Cancel"
msgstr ""
msgstr "Cancelar"
#: static/js/jumpserver.js:365 static/js/jumpserver.js:406
msgid "Confirm"
msgstr ""
msgstr "Confirmar"
#: static/js/jumpserver.js:384
msgid ""
"The organization contains undeleted information. Please try again after "
"deleting"
msgstr ""
msgstr "La organización contiene información no eliminada, por favor elimínela y vuelva a intentarlo"
#: static/js/jumpserver.js:386
msgid ""
"Do not perform this operation under this organization. Try again after "
"switching to another organization"
msgstr ""
msgstr "No realice esta operación en esta organización, cambie a otra organización e inténtelo de nuevo"
#: static/js/jumpserver.js:400
msgid ""
"Please ensure that the following information in the organization has been "
"deleted"
msgstr ""
msgstr "Asegúrese de que se haya eliminado la siguiente información de su organización"
#: static/js/jumpserver.js:401
msgid ""
"User list、User group、Asset list、Domain list、Admin user、System user、"
"Labels、Asset permission"
msgstr ""
"Lista de usuarios, grupo de usuarios, lista de activos, lista de dominios, usuario privilegiado, usuario del sistema, gestión de etiquetas, autorización de activos"
"regla"
#: static/js/jumpserver.js:650
msgid "Unknown error occur"
msgstr ""
msgstr "Se produjo un error desconocido"
#: static/js/jumpserver.js:902
msgid "Password minimum length {N} bits"
msgstr ""
msgstr "La longitud mínima de la contraseña es {N} caracteres"
#: static/js/jumpserver.js:903
msgid "Must contain capital letters"
msgstr ""
msgstr "Debe contener letras mayúsculas"
#: static/js/jumpserver.js:904
msgid "Must contain lowercase letters"
msgstr ""
msgstr "Debe contener letras minúsculas"
#: static/js/jumpserver.js:905
msgid "Must contain numeric characters"
msgstr ""
msgstr "Debe contener caracteres numéricos"
#: static/js/jumpserver.js:906
msgid "Must contain special characters"
msgstr ""
msgstr "Debe contener caracteres especiales"

File diff suppressed because it is too large

View File

@ -107,45 +107,3 @@ msgstr "数字を含める必要があります。"
#: static/js/jumpserver.js:906
msgid "Must contain special characters"
msgstr "特殊文字を含める必要があります"
#~ msgid "Loading"
#~ msgstr "読み込み中"
#~ msgid "Search"
#~ msgstr "検索"
#, javascript-format
#~ msgid "Selected item %d"
#~ msgstr "選択したアイテム % d"
#~ msgid "Per page _MENU_"
#~ msgstr "各ページ _MENU_"
#~ msgid ""
#~ "Displays the results of items _START_ to _END_; A total of _TOTAL_ entries"
#~ msgstr ""
#~ "アイテムの結果を表示します _START_ に着く _END_; 合計 _TOTAL_ エントリ"
#~ msgid "No match"
#~ msgstr "一致しません"
#~ msgid "No record"
#~ msgstr "記録なし"
#~ msgid "Export failed"
#~ msgstr "エクスポートに失敗しました"
#~ msgid "Import Success"
#~ msgstr "インポートの成功"
#~ msgid "Update Success"
#~ msgstr "更新の成功"
#~ msgid "Count"
#~ msgstr "カウント"
#~ msgid "Import failed"
#~ msgstr "インポートに失敗しました"
#~ msgid "Update failed"
#~ msgstr "更新に失敗しました"

File diff suppressed because it is too large

View File

@ -0,0 +1,105 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2025-02-24 14:25+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
#: static/js/jumpserver.js:267
msgid "Update is successful!"
msgstr "업데이트 성공"
#: static/js/jumpserver.js:269
msgid "An unknown error occurred while updating.."
msgstr "업데이트 중 알 수 없는 오류가 발생했습니다."
#: static/js/jumpserver.js:342
msgid "Not found"
msgstr "찾을 수 없음"
#: static/js/jumpserver.js:344
msgid "Server error"
msgstr "서버 오류"
#: static/js/jumpserver.js:346 static/js/jumpserver.js:384
#: static/js/jumpserver.js:386
msgid "Error"
msgstr "오류"
#: static/js/jumpserver.js:352 static/js/jumpserver.js:393
msgid "Delete the success"
msgstr "성공적으로 삭제되었습니다"
#: static/js/jumpserver.js:359
msgid "Are you sure about deleting it?"
msgstr "정말 삭제하시겠습니까?"
#: static/js/jumpserver.js:363 static/js/jumpserver.js:404
msgid "Cancel"
msgstr "취소"
#: static/js/jumpserver.js:365 static/js/jumpserver.js:406
msgid "Confirm"
msgstr "확인"
#: static/js/jumpserver.js:384
msgid ""
"The organization contains undeleted information. Please try again after "
"deleting"
msgstr "조직에 삭제되지 않은 정보가 포함되어 있습니다. 삭제하고 다시 시도하세요."
#: static/js/jumpserver.js:386
msgid ""
"Do not perform this operation under this organization. Try again after "
"switching to another organization"
msgstr "이 조직에서는 이 작업을 수행하지 마십시오. 다른 조직으로 전환한 후 다시 시도하십시오."
#: static/js/jumpserver.js:400
msgid ""
"Please ensure that the following information in the organization has been "
"deleted"
msgstr "조직에서 다음 정보가 삭제되었는지 확인하세요"
#: static/js/jumpserver.js:401
msgid ""
"User list、User group、Asset list、Domain list、Admin user、System user、"
"Labels、Asset permission"
msgstr "사용자 목록, 사용자 그룹, 자산 목록, 도메인 목록, 권한이 있는 사용자, 시스템 사용자, 태그 관리, 자산 권한 부여"
"규칙"
#: static/js/jumpserver.js:650
msgid "Unknown error occur"
msgstr "알 수 없는 오류가 발생했습니다"
#: static/js/jumpserver.js:902
msgid "Password minimum length {N} bits"
msgstr "최소 비밀번호 길이는 {N}자입니다"
#: static/js/jumpserver.js:903
msgid "Must contain capital letters"
msgstr "대문자를 포함해야 합니다"
#: static/js/jumpserver.js:904
msgid "Must contain lowercase letters"
msgstr "소문자를 포함해야 합니다"
#: static/js/jumpserver.js:905
msgid "Must contain numeric characters"
msgstr "숫자를 포함해야 합니다"
#: static/js/jumpserver.js:906
msgid "Must contain special characters"
msgstr "특수문자를 포함해야 합니다"

File diff suppressed because it is too large

View File

@ -20,85 +20,87 @@ msgstr ""
#: static/js/jumpserver.js:267
msgid "Update is successful!"
msgstr ""
msgstr "Atualização bem-sucedida"
#: static/js/jumpserver.js:269
msgid "An unknown error occurred while updating.."
msgstr ""
msgstr "Ocorreu um erro desconhecido durante a atualização"
#: static/js/jumpserver.js:342
msgid "Not found"
msgstr ""
msgstr "Não encontrado"
#: static/js/jumpserver.js:344
msgid "Server error"
msgstr ""
msgstr "Erro do servidor"
#: static/js/jumpserver.js:346 static/js/jumpserver.js:384
#: static/js/jumpserver.js:386
msgid "Error"
msgstr ""
msgstr "Erro"
#: static/js/jumpserver.js:352 static/js/jumpserver.js:393
msgid "Delete the success"
msgstr ""
msgstr "Excluído com sucesso"
#: static/js/jumpserver.js:359
msgid "Are you sure about deleting it?"
msgstr ""
msgstr "Tem certeza de que deseja excluir?"
#: static/js/jumpserver.js:363 static/js/jumpserver.js:404
msgid "Cancel"
msgstr ""
msgstr "Cancelar"
#: static/js/jumpserver.js:365 static/js/jumpserver.js:406
msgid "Confirm"
msgstr ""
msgstr "Confirmar"
#: static/js/jumpserver.js:384
msgid ""
"The organization contains undeleted information. Please try again after "
"deleting"
msgstr ""
msgstr "A organização contém informações não excluídas, exclua-as e tente novamente"
#: static/js/jumpserver.js:386
msgid ""
"Do not perform this operation under this organization. Try again after "
"switching to another organization"
msgstr ""
msgstr "Não execute esta operação nesta organização, mude para outra organização e tente novamente"
#: static/js/jumpserver.js:400
msgid ""
"Please ensure that the following information in the organization has been "
"deleted"
msgstr ""
msgstr "Certifique-se de que as seguintes informações da sua organização foram excluídas"
#: static/js/jumpserver.js:401
msgid ""
"User list、User group、Asset list、Domain list、Admin user、System user、"
"Labels、Asset permission"
msgstr ""
"Lista de usuários, grupo de usuários, lista de ativos, lista de domínios, usuário privilegiado, usuário do sistema, gerenciamento de tags, autorização de ativos"
"regra"
#: static/js/jumpserver.js:650
msgid "Unknown error occur"
msgstr ""
msgstr "Ocorreu um erro desconhecido"
#: static/js/jumpserver.js:902
msgid "Password minimum length {N} bits"
msgstr ""
msgstr "O comprimento mínimo da senha é {N} caracteres"
#: static/js/jumpserver.js:903
msgid "Must contain capital letters"
msgstr ""
msgstr "Deve conter letras maiúsculas"
#: static/js/jumpserver.js:904
msgid "Must contain lowercase letters"
msgstr ""
msgstr "Deve conter letras minúsculas"
#: static/js/jumpserver.js:905
msgid "Must contain numeric characters"
msgstr ""
msgstr "Deve conter caracteres numéricos"
#: static/js/jumpserver.js:906
msgid "Must contain special characters"
msgstr ""
msgstr "Deve conter caracteres especiais"

File diff suppressed because it is too large

View File

@ -21,85 +21,85 @@ msgstr ""
"(n%100>=11 && n%100<=14)? 2 : 3);\n"
#: static/js/jumpserver.js:267
msgid "Update is successful!"
msgstr ""
msgstr "Обновление успешно выполнено"
#: static/js/jumpserver.js:269
msgid "An unknown error occurred while updating.."
msgstr ""
msgstr "Произошла неизвестная ошибка при обновлении"
#: static/js/jumpserver.js:342
msgid "Not found"
msgstr ""
msgstr "Не найдено"
#: static/js/jumpserver.js:344
msgid "Server error"
msgstr ""
msgstr "Ошибка сервера"
#: static/js/jumpserver.js:346 static/js/jumpserver.js:384
#: static/js/jumpserver.js:386
msgid "Error"
msgstr ""
msgstr "Ошибка"
#: static/js/jumpserver.js:352 static/js/jumpserver.js:393
msgid "Delete the success"
msgstr ""
msgstr "Удален успешно"
#: static/js/jumpserver.js:359
msgid "Are you sure about deleting it?"
msgstr ""
msgstr "Вы уверены, что хотите удалить?"
#: static/js/jumpserver.js:363 static/js/jumpserver.js:404
msgid "Cancel"
msgstr ""
msgstr "Отмена"
#: static/js/jumpserver.js:365 static/js/jumpserver.js:406
msgid "Confirm"
msgstr ""
msgstr "Подтвердить"
#: static/js/jumpserver.js:384
msgid ""
"The organization contains undeleted information. Please try again after "
"deleting"
msgstr ""
msgstr "Организация содержит неудалённую информацию, пожалуйста, удалите её и повторите попытку"
#: static/js/jumpserver.js:386
msgid ""
"Do not perform this operation under this organization. Try again after "
"switching to another organization"
msgstr ""
msgstr "Не выполняйте эту операцию в этой организации, переключитесь на другую организацию и попробуйте снова"
#: static/js/jumpserver.js:400
msgid ""
"Please ensure that the following information in the organization has been "
"deleted"
msgstr ""
msgstr "Пожалуйста, убедитесь, что следующая информация в вашей организации удалена"
#: static/js/jumpserver.js:401
msgid ""
"User list、User group、Asset list、Domain list、Admin user、System user、"
"Labels、Asset permission"
msgstr ""
msgstr "Список пользователей, группа пользователей, список активов, список доменов, привилегированный пользователь, системный пользователь, управление тегами, авторизация активов правило"
#: static/js/jumpserver.js:650
msgid "Unknown error occur"
msgstr ""
msgstr "Произошла неизвестная ошибка"
#: static/js/jumpserver.js:902
msgid "Password minimum length {N} bits"
msgstr ""
msgstr "Минимальная длина пароля — {N} символов"
#: static/js/jumpserver.js:903
msgid "Must contain capital letters"
msgstr ""
msgstr "Должен содержать заглавные буквы"
#: static/js/jumpserver.js:904
msgid "Must contain lowercase letters"
msgstr ""
msgstr "Должен содержать строчные буквы"
#: static/js/jumpserver.js:905
msgid "Must contain numeric characters"
msgstr ""
msgstr "Должен содержать числовые символы"
#: static/js/jumpserver.js:906
msgid "Must contain special characters"
msgstr ""
msgstr "Должен содержать специальные символы"

Some files were not shown because too many files have changed in this diff