mirror of https://github.com/jumpserver/jumpserver
Bryan
10 hours ago
committed by
GitHub
163 changed files with 6365 additions and 4471 deletions
@ -0,0 +1,24 @@
|
||||
name: Publish Release to Discord

on:
  release:
    types: [published]

jobs:
  send_discord_notification:
    runs-on: ubuntu-latest
    if: startsWith(github.event.release.tag_name, 'v4.')
    steps:
      - name: Send release notification to Discord
        env:
          WEBHOOK_URL: ${{ secrets.DISCORD_CHANGELOG_WEBHOOK }}
          # Security fix: the release body is attacker-influenced text.
          # Expanding ${{ ... }} directly inside the run script allows shell
          # injection; passing it through env keeps it an opaque string.
          TAG_NAME: ${{ github.event.release.tag_name }}
          RELEASE_BODY: ${{ github.event.release.body }}
        run: |
          # Build the JSON payload with jq so the body is safely escaped
          JSON_PAYLOAD=$(jq -n --arg tag "# JumpServer $TAG_NAME Released! 🚀" --arg body "$RELEASE_BODY" '{content: "\($tag)\n\($body)"}')

          # Post the payload to the Discord webhook
          curl -X POST -H "Content-Type: application/json" -d "$JSON_PAYLOAD" "$WEBHOOK_URL"
@ -0,0 +1,28 @@
|
||||
name: LLM Code Review

permissions:
  contents: read
  pull-requests: write

on:
  pull_request:
    types: [opened, reopened, synchronize]

jobs:
  llm-code-review:
    runs-on: ubuntu-latest
    steps:
      - uses: fit2cloud/LLM-CodeReview-Action@main
        env:
          GITHUB_TOKEN: ${{ secrets.FIT2CLOUDRD_LLM_CODE_REVIEW_TOKEN }}
          OPENAI_API_KEY: ${{ secrets.ALIYUN_LLM_API_KEY }}
          LANGUAGE: English
          # OpenAI-compatible endpoint served by Aliyun DashScope
          OPENAI_API_ENDPOINT: https://dashscope.aliyuncs.com/compatible-mode/v1
          MODEL: qwen2-1.5b-instruct
          PROMPT: "Please check the following code differences for any irregularities, potential issues, or optimization suggestions, and provide your answers in English."
          top_p: 1
          temperature: 1
          # max_tokens: 10000
          # Diffs longer than this are truncated before being sent to the model
          MAX_PATCH_LENGTH: 10000
          # Paths to skip, and the file globs that are actually reviewed
          IGNORE_PATTERNS: "/node_modules,*.md,/dist,/.github"
          FILE_PATTERNS: "*.java,*.go,*.py,*.vue,*.ts,*.js,*.css,*.scss,*.html"
@ -0,0 +1,70 @@
|
||||
import sys |
||||
from abc import ABC |
||||
|
||||
from common.db.utils import Encryptor |
||||
from common.utils import lazyproperty |
||||
|
||||
current_module = sys.modules[__name__] |
||||
|
||||
__all__ = ['build_entry'] |
||||
|
||||
|
||||
class BaseEntry(ABC):
    """Abstract vault entry wrapping a model instance.

    Subclasses provide ``path_spec``, which decides where the wrapped
    instance's secret lives in the vault backend.
    """

    def __init__(self, instance):
        self.instance = instance

    @lazyproperty
    def full_path(self):
        """Cached vault path for this entry (currently just ``path_spec``)."""
        return self.path_spec

    @property
    def path_spec(self):
        """Subclasses must return the backend path for the instance."""
        raise NotImplementedError

    def to_internal_data(self):
        """Return the instance's raw secret encrypted for storage, or None."""
        plain = getattr(self.instance, '_secret', None)
        if plain is None:
            return None
        return Encryptor(plain).encrypt()

    @staticmethod
    def to_external_data(secret):
        """Decrypt a stored secret; a None secret passes through unchanged."""
        if secret is None:
            return None
        return Encryptor(secret).decrypt()
||||
|
||||
|
||||
class AccountEntry(BaseEntry):
    """Vault entry for an account, namespaced under its asset."""

    @property
    def path_spec(self):
        # Backend secret names are limited to 0-127 characters, so the
        # account id is truncated to keep the full path within bounds.
        short_id = str(self.instance.id)[:18]
        return f'assets-{self.instance.asset_id}-accounts-{short_id}'
||||
|
||||
|
||||
class AccountTemplateEntry(BaseEntry):
    """Vault entry for an account template."""

    @property
    def path_spec(self):
        return f'account-templates-{self.instance.id}'
||||
|
||||
|
||||
class HistoricalAccountEntry(BaseEntry):
    """Vault entry for one historical version of an account."""

    @property
    def path_spec(self):
        # self.instance is the history record; its .instance is the account.
        account = self.instance.instance
        return f'accounts-{account.id}-histories-{self.instance.history_id}'
||||
|
||||
|
||||
def build_entry(instance) -> BaseEntry:
    """Build the vault entry matching *instance*'s model class.

    The entry class is resolved by naming convention: a model class
    ``Foo`` maps to a ``FooEntry`` class defined in this module.

    Raises:
        ValueError: if no entry class exists for the instance's class.
    """
    entry_class_name = f'{instance.__class__.__name__}Entry'
    entry_class = getattr(current_module, entry_class_name, None)
    if entry_class is None:
        # ValueError is more precise than a bare Exception, and is still
        # caught by any caller handling Exception.
        raise ValueError(f'Entry class {entry_class_name} is not found')
    return entry_class(instance)
@ -0,0 +1,57 @@
|
||||
from common.db.utils import get_logger |
||||
from .entries import build_entry |
||||
from .service import AZUREVaultClient |
||||
from ..base import BaseVault |
||||
|
||||
from ...const import VaultTypeChoices |
||||
|
||||
logger = get_logger(__name__) |
||||
|
||||
__all__ = ['Vault'] |
||||
|
||||
|
||||
class Vault(BaseVault):
    """Vault backend that stores account secrets in Azure Key Vault."""

    type = VaultTypeChoices.azure

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # All connection settings come from the vault configuration kwargs.
        self.client = AZUREVaultClient(
            vault_url=kwargs.get('VAULT_AZURE_HOST'),
            tenant_id=kwargs.get('VAULT_AZURE_TENANT_ID'),
            client_id=kwargs.get('VAULT_AZURE_CLIENT_ID'),
            client_secret=kwargs.get('VAULT_AZURE_CLIENT_SECRET')
        )

    def is_active(self):
        """Delegate the reachability probe to the Azure client."""
        return self.client.is_active()

    def _get(self, instance):
        """Fetch and decrypt the secret stored for *instance*."""
        entry = build_entry(instance)
        stored = self.client.get(name=entry.full_path)
        return entry.to_external_data(stored)

    def _create(self, instance):
        """Encrypt and store the instance's secret under its vault path."""
        entry = build_entry(instance)
        self.client.create(name=entry.full_path, secret=entry.to_internal_data())

    def _update(self, instance):
        """Encrypt and overwrite the instance's secret in the vault."""
        entry = build_entry(instance)
        self.client.update(name=entry.full_path, secret=entry.to_internal_data())

    def _delete(self, instance):
        """Remove the instance's secret from the vault."""
        entry = build_entry(instance)
        self.client.delete(name=entry.full_path)

    def _clean_db_secret(self, instance):
        """Mark the secret as vault-held so the DB copy can be dropped."""
        instance.is_sync_metadata = False
        instance.mark_secret_save_to_vault()

    def _save_metadata(self, instance, metadata):
        """Best-effort metadata sync; failures are logged, never raised."""
        try:
            entry = build_entry(instance)
            self.client.update_metadata(name=entry.full_path, metadata=metadata)
        except Exception as e:
            logger.error(f'save metadata error: {e}')
@ -0,0 +1,59 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError |
||||
from azure.identity import ClientSecretCredential |
||||
from azure.keyvault.secrets import SecretClient |
||||
|
||||
from common.utils import get_logger |
||||
|
||||
logger = get_logger(__name__) |
||||
|
||||
__all__ = ['AZUREVaultClient'] |
||||
|
||||
|
||||
class AZUREVaultClient(object):
    """Thin wrapper around the Azure Key Vault SecretClient.

    SDK errors for common failure modes (missing secret, bad auth) are
    logged and converted into benign return values so callers never have
    to handle Azure exceptions directly.
    """

    def __init__(self, vault_url, tenant_id, client_id, client_secret):
        # Robustness fix: a missing URL made `'azure.net' in vault_url`
        # raise TypeError; normalize to '' so construction succeeds and the
        # failure is reported later by is_active().
        vault_url = vault_url or ''
        # Global Azure vaults (*.azure.net) use the global login authority;
        # anything else is assumed to be Azure China.
        if 'azure.net' in vault_url:
            authentication_endpoint = 'https://login.microsoftonline.com/'
        else:
            authentication_endpoint = 'https://login.chinacloudapi.cn/'

        credentials = ClientSecretCredential(
            client_id=client_id, client_secret=client_secret,
            tenant_id=tenant_id, authority=authentication_endpoint
        )
        self.client = SecretClient(vault_url=vault_url, credential=credentials)

    def is_active(self):
        """Probe connectivity by writing a harmless marker secret.

        Returns (ok, error_message).
        """
        try:
            self.client.set_secret('jumpserver', '666')
        except (ResourceNotFoundError, ClientAuthenticationError) as e:
            logger.error(str(e))
            return False, f'Vault is not reachable: {e}'
        return True, ''

    def get(self, name, version=None):
        """Return the secret value, or '' when missing or unauthorized."""
        try:
            secret = self.client.get_secret(name, version)
            return secret.value
        except (ResourceNotFoundError, ClientAuthenticationError) as e:
            logger.error(f'get: {name} {str(e)}')
            return ''

    def _set(self, name, secret):
        # Shared by create/update (Key Vault set_secret upserts anyway).
        # Normalize a falsy secret to '' — the API rejects None.
        if not secret:
            secret = ''
        self.client.set_secret(name, secret)

    def create(self, name, secret):
        """Store *secret* under *name*."""
        self._set(name, secret)

    def update(self, name, secret):
        """Overwrite the secret stored under *name*."""
        self._set(name, secret)

    def delete(self, name):
        """Start (asynchronous) deletion of the secret *name*."""
        self.client.begin_delete_secret(name)

    def update_metadata(self, name, metadata: dict):
        """Best-effort update of the secret's tags; errors are logged."""
        try:
            self.client.update_secret_properties(name, tags=metadata)
        except (ResourceNotFoundError, ClientAuthenticationError) as e:
            logger.error(f'update_metadata: {name} {str(e)}')
@ -0,0 +1,57 @@
|
||||
from django.core.cache import cache |
||||
|
||||
from authentication.mfa.base import BaseMFA |
||||
from django.shortcuts import reverse |
||||
from django.utils.translation import gettext_lazy as _ |
||||
|
||||
from authentication.mixins import MFAFaceMixin |
||||
from common.const import LicenseEditionChoices |
||||
from settings.api import settings |
||||
|
||||
|
||||
class MFAFace(BaseMFA, MFAFaceMixin):
    """MFA backend that verifies the user via face recognition."""

    name = "face"
    display_name = _('Face Recognition')
    placeholder = 'Face Recognition'

    def check_code(self, code):
        assert self.is_authenticated()
        try:
            # The submitted code is ignored; the authoritative face code is
            # pulled from the server-side face-capture session.
            code = self.get_face_code()
            if not self.user.check_face(code):
                return False, _('Facial comparison failed')
        except Exception as e:
            return False, "{}:{}".format(_('Facial comparison failed'), str(e))
        return True, ''

    def is_active(self):
        # Anonymous users are treated as active so the flow can proceed.
        if not self.is_authenticated():
            return True
        return bool(self.user.face_vector)

    @staticmethod
    def global_enabled():
        # Requires a valid Ultimate license AND the feature flag.
        return (
            settings.XPACK_LICENSE_IS_VALID
            and LicenseEditionChoices.from_key(
                settings.XPACK_LICENSE_EDITION) == LicenseEditionChoices.ULTIMATE
            and settings.FACE_RECOGNITION_ENABLED
        )

    def get_enable_url(self) -> str:
        return reverse('authentication:user-face-enable')

    def get_disable_url(self) -> str:
        return reverse('authentication:user-face-disable')

    def disable(self):
        assert self.is_authenticated()
        # Dropping the stored face vector disables this MFA method.
        self.user.face_vector = ''
        self.user.save(update_fields=['face_vector'])

    def can_disable(self) -> bool:
        return True

    @staticmethod
    def help_text_of_enable():
        return _("Frontal Face Recognition")
@ -0,0 +1,92 @@
|
||||
{% extends '_base_only_content.html' %}
{% load i18n %}
{% load static %}

{% block content %}

    {# Validation errors raised against the MFA "code" form field #}
    {% if 'code' in form.errors %}
        <div class="alert alert-danger" id="messages">
            <p class="red-fonts">{{ form.code.errors.as_text }}</p>
        </div>
    {% endif %}

    <!-- Shown when a face-capture token could not be created -->
    <div id="retry_container" style="text-align: center; margin-top: 20px; display: none;">
        <button id="retry_button" class="btn btn-primary">{% trans 'Retry' %}</button>
    </div>

    <!-- Hidden form, auto-submitted once the capture session finishes -->
    <form class="m-t" role="form" method="post" action="" style="display: none">
        {% csrf_token %}
        <button id="submit_button" type="submit" style="display: none"></button>
    </form>

    <!-- Host container for the embedded face-capture page -->
    <div id="iframe_container"
         style="display: none; justify-content: center; align-items: center; height: 520px; width: 100%;">
        <iframe
                title="face capture"
                id="face_capture_iframe"
                allow="camera"
                sandbox="allow-scripts allow-same-origin"
                style="width: 100%; height:100%;border: none;">
        </iframe>
    </div>

    <script>
        $(document).ready(function () {
            const apiUrl = "{% url 'api-auth:mfa-face-context' %}";
            const faceCaptureUrl = "/facelive/capture";
            let token;

            // Ask the backend for a capture token, point the iframe at the
            // capture page with that token, then start polling for completion.
            function createFaceCaptureToken() {
                const csrf = getCookie('jms_csrftoken');
                $.ajax({
                    url: apiUrl,
                    method: 'POST',
                    headers: {
                        'X-CSRFToken': csrf
                    },
                    success: function (data) {
                        token = data.token;
                        $('#iframe_container').show();
                        $('#face_capture_iframe').attr('src', `${faceCaptureUrl}?token=${token}`);
                        startCheckingStatus();
                    },
                    error: function (error) {
                        // Token creation failed — let the user retry manually.
                        $('#retry_container').show();
                    }
                });
            }

            // Poll the context API once a second; when the capture session
            // reports completion, submit the hidden form to finish MFA.
            function startCheckingStatus() {
                const interval = 1000;
                const timer = setInterval(function () {
                    $.ajax({
                        url: `${apiUrl}?token=${token}`,
                        method: 'GET',
                        success: function (data) {
                            if (data.is_finished) {
                                clearInterval(timer);
                                $('#submit_button').click();
                            }
                        },
                        error: function (error) {
                            // Transient poll errors are logged and retried.
                            console.error('API request failed:', error);
                        }
                    });
                }, interval);
            }

            // "active" is rendered by the server; only start capture when set.
            const active = "{{ active }}";
            if (active) {
                createFaceCaptureToken();
            } else {
                $('#retry_container').show();
            }

            $('#retry_button').on('click', function () {
                window.location.href = "{% url 'authentication:login-face-capture' %}";
            });
        });
    </script>
{% endblock %}
@ -0,0 +1,41 @@
|
||||
#!/usr/bin/env python |
||||
# coding: utf-8 |
||||
# Copyright (c) 2018 |
||||
# |
||||
|
||||
__version__ = '0.0.59' |
||||
|
||||
from .ftp import FTPStorage |
||||
from .oss import OSSStorage |
||||
from .obs import OBSStorage |
||||
from .s3 import S3Storage |
||||
from .azure import AzureStorage |
||||
from .ceph import CEPHStorage |
||||
from .jms import JMSReplayStorage, JMSCommandStorage |
||||
from .multi import MultiObjectStorage |
||||
from .sftp import SFTPStorage |
||||
|
||||
|
||||
def get_object_storage(config):
    """Instantiate the object-storage backend matching ``config["TYPE"]``.

    Unknown or missing types fall back to JMSReplayStorage (the core
    service stores the replay itself).

    NOTE(review): in the original chain, "ceph" was already matched by the
    S3 membership test, so the later ``elif TYPE == "ceph"`` returning
    CEPHStorage was unreachable. That observable behavior
    (ceph -> S3Storage) is deliberately preserved here.
    """
    storage_type = config.get("TYPE")
    # These services all speak the S3 protocol.
    if storage_type in ("s3", "ceph", "swift", "cos"):
        return S3Storage(config)
    type_map = {
        "oss": OSSStorage,
        "server": JMSReplayStorage,
        "azure": AzureStorage,
        "ftp": FTPStorage,
        "obs": OBSStorage,
        "sftp": SFTPStorage,
    }
    storage_class = type_map.get(storage_type, JMSReplayStorage)
    return storage_class(config)
||||
|
||||
|
||||
def get_multi_object_storage(configs):
    """Wrap several storage configs into a single fan-out storage."""
    return MultiObjectStorage(configs)
@ -0,0 +1,61 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
|
||||
import os |
||||
|
||||
from azure.storage.blob import BlobServiceClient |
||||
|
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class AzureStorage(ObjectStorage):
    """Replay storage backed by Azure Blob Storage."""

    def __init__(self, config):
        self.account_name = config.get("ACCOUNT_NAME", None)
        self.account_key = config.get("ACCOUNT_KEY", None)
        self.container_name = config.get("CONTAINER_NAME", None)
        # Default suffix targets Azure China; global Azure uses
        # 'core.windows.net'.
        self.endpoint_suffix = config.get("ENDPOINT_SUFFIX", 'core.chinacloudapi.cn')

        if self.account_name and self.account_key:
            self.service_client = BlobServiceClient(
                account_url=f'https://{self.account_name}.blob.{self.endpoint_suffix}',
                credential={'account_name': self.account_name, 'account_key': self.account_key}
            )
            self.client = self.service_client.get_container_client(self.container_name)
        else:
            # Incomplete credentials: leave the client unset; operations
            # will fail loudly instead of silently misbehaving.
            self.client = None

    def upload(self, src, target):
        """Upload *src* (path/stream) as blob *target*; returns (ok, error)."""
        try:
            self.client.upload_blob(target, src)
            return True, None
        except Exception as e:
            return False, e

    def download(self, src, target):
        """Download blob *src* to local path *target*; returns (ok, error)."""
        try:
            blob_data = self.client.download_blob(blob=src)
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            with open(target, 'wb') as writer:
                writer.write(blob_data.readall())
            return True, None
        except Exception as e:
            return False, e

    def delete(self, path):
        """Delete blob *path*; returns (ok, error).

        Bug fix: the original returned (True, False) on success while every
        other backend returns (True, None).
        """
        try:
            self.client.delete_blob(path)
            return True, None
        except Exception as e:
            return False, e

    def exists(self, path):
        # list_blobs matches by prefix, so any blob starting with *path*
        # counts as existing.
        resp = self.client.list_blobs(name_starts_with=path)
        return len(list(resp)) != 0

    def list_buckets(self):
        """Return the storage account's containers."""
        return list(self.service_client.list_containers())

    @property
    def type(self):
        return 'azure'
@ -0,0 +1,51 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
|
||||
import abc |
||||
|
||||
|
||||
class ObjectStorage(metaclass=abc.ABCMeta):
    """Abstract interface every replay object-storage backend implements.

    Mutating operations conventionally return an ``(ok, error)`` tuple.
    """

    @abc.abstractmethod
    def upload(self, src, target):
        """Upload *src* to *target*; returns (ok, error)."""
        return None, None

    @abc.abstractmethod
    def download(self, src, target):
        """Download object *src* to local path *target*."""
        pass

    @abc.abstractmethod
    def delete(self, path):
        """Remove the object stored at *path*."""
        pass

    @abc.abstractmethod
    def exists(self, path):
        """Whether an object exists at *path*."""
        pass

    def is_valid(self, src, target):
        """Probe the backend by uploading *src*, then cleaning it up."""
        uploaded, _ = self.upload(src=src, target=target)
        if not uploaded:
            return False
        self.delete(path=target)
        return True
||||
|
||||
|
||||
class LogStorage(metaclass=abc.ABCMeta):
    """Abstract interface for session-command (log) storage backends."""

    @abc.abstractmethod
    def save(self, command):
        """Persist a single command record."""
        pass

    @abc.abstractmethod
    def bulk_save(self, command_set, raise_on_error=True):
        """Persist a batch of command records."""
        pass

    @abc.abstractmethod
    def filter(self, date_from=None, date_to=None,
               user=None, asset=None, account=None,
               input=None, session=None):
        """Query stored commands by the given criteria."""
        pass

    @abc.abstractmethod
    def count(self, date_from=None, date_to=None,
              user=None, asset=None, account=None,
              input=None, session=None):
        """Count stored commands matching the given criteria."""
        pass
@ -0,0 +1,68 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
|
||||
import os |
||||
import boto |
||||
import boto.s3.connection |
||||
|
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class CEPHStorage(ObjectStorage):
    """Replay storage on a Ceph RADOS gateway, via its S3 API (boto)."""

    def __init__(self, config):
        self.bucket = config.get("BUCKET", None)
        self.region = config.get("REGION", None)
        self.access_key = config.get("ACCESS_KEY", None)
        self.secret_key = config.get("SECRET_KEY", None)
        self.hostname = config.get("HOSTNAME", None)
        self.port = config.get("PORT", 7480)
        # Bug fix: self.conn was only assigned inside the branch below, so an
        # incomplete config relied on the resulting AttributeError being
        # swallowed; initialize both attributes explicitly instead.
        self.conn = None
        self.client = None

        if self.hostname and self.access_key and self.secret_key:
            self.conn = boto.connect_s3(
                aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key,
                host=self.hostname,
                port=self.port,
                is_secure=False,
                calling_format=boto.s3.connection.OrdinaryCallingFormat(),
            )
        if self.conn is not None:
            try:
                self.client = self.conn.get_bucket(bucket_name=self.bucket)
            except Exception:
                self.client = None

    def upload(self, src, target):
        """Upload local file *src* to key *target*; returns (ok, error)."""
        try:
            key = self.client.new_key(target)
            key.set_contents_from_filename(src)
            return True, None
        except Exception as e:
            return False, e

    def download(self, src, target):
        """Download key *src* to local path *target*; returns (ok, error)."""
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            key = self.client.get_key(src)
            key.get_contents_to_filename(target)
            return True, None
        except Exception as e:
            return False, e

    def delete(self, path):
        """Delete key *path*; returns (ok, error)."""
        try:
            self.client.delete_key(path)
            return True, None
        except Exception as e:
            return False, e

    def exists(self, path):
        """Return a bool like the other backends.

        Bug fix: get_key() returns the Key object or None; the original
        leaked that value to callers instead of a boolean (callers relied
        on truthiness, so this stays backward-compatible).
        """
        try:
            return self.client.get_key(path) is not None
        except Exception:
            return False

    @property
    def type(self):
        return 'ceph'
@ -0,0 +1,116 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
|
||||
import os |
||||
from ftplib import FTP, error_perm |
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class FTPStorage(ObjectStorage):
    """Replay storage on a plain FTP server.

    A single FTP connection is kept per instance and transparently
    re-established by confirm_connected() when it has gone away.
    """

    def __init__(self, config):
        self.host = config.get("HOST", None)
        self.port = int(config.get("PORT", 21))
        self.username = config.get("USERNAME", None)
        self.password = config.get("PASSWORD", None)
        self.pasv = bool(config.get("PASV", False))
        self.dir = config.get("DIR", "replay")
        self.client = FTP()
        self.client.encoding = 'utf-8'
        self.client.set_pasv(self.pasv)
        self.pwd = '.'
        self.connect()

    def connect(self, timeout=-999, source_address=None):
        """Connect, log in and cd into the configured base directory.

        timeout=-999 is ftplib's sentinel for "use the global default".
        """
        self.client.connect(self.host, self.port, timeout, source_address)
        self.client.login(self.username, self.password)
        if not self.check_dir_exist(self.dir):
            self.mkdir(self.dir)
        self.client.cwd(self.dir)
        self.pwd = self.client.pwd()

    def confirm_connected(self):
        # PWD is a cheap no-op; if it fails the control channel is dead
        # and we reconnect.
        try:
            self.client.pwd()
        except Exception:
            self.connect()

    def upload(self, src, target):
        """Upload local file *src* to *target*; returns (ok, error).

        Raises PermissionError if the remote directory cannot be created.
        """
        self.confirm_connected()
        target_dir = os.path.dirname(target)
        if not self.check_dir_exist(target_dir):
            if not self.mkdir(target_dir):
                raise PermissionError('Dir create error: %s' % target)
        try:
            with open(src, 'rb') as f:
                self.client.storbinary('STOR ' + target, f)
            return True, None
        except Exception as e:
            return False, e

    def download(self, src, target):
        """Download remote *src* to local *target*; returns (ok, error)."""
        self.confirm_connected()
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            with open(target, 'wb') as f:
                self.client.retrbinary('RETR ' + src, f.write)
            return True, None
        except Exception as e:
            return False, e

    def delete(self, path):
        """Delete remote *path*; raises FileNotFoundError if absent."""
        self.confirm_connected()
        if not self.exists(path):
            raise FileNotFoundError('File not exist error(%s)' % path)
        try:
            self.client.delete(path)
            return True, None
        except Exception as e:
            return False, e

    def check_dir_exist(self, d):
        # Probe by cd'ing in and back; error_perm means "no such directory".
        pwd = self.client.pwd()
        try:
            self.client.cwd(d)
            self.client.cwd(pwd)
            return True
        except error_perm:
            return False

    def mkdir(self, dirs):
        """Create a possibly multi-level directory.

        ftplib cannot create nested directories in one call, so each path
        component is entered (or created then entered) in turn.
        """
        self.confirm_connected()
        dir_list = dirs.split('/')
        pwd = self.client.pwd()
        try:
            for d in dir_list:
                if not d or d in ['.']:
                    continue
                # Try entering the directory first ...
                try:
                    self.client.cwd(d)
                    continue
                except Exception:  # narrowed from a bare except
                    pass
                # ... it does not exist: create it, then enter it.
                try:
                    self.client.mkd(d)
                    self.client.cwd(d)
                except Exception:  # narrowed from a bare except
                    return False
            return True
        finally:
            # Always restore the working directory we started from.
            self.client.cwd(pwd)

    def exists(self, target):
        self.confirm_connected()
        try:
            self.client.size(target)
            return True
        except Exception:  # narrowed from a bare except
            return False

    def close(self):
        self.client.close()
@ -0,0 +1,50 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
import os |
||||
from .base import ObjectStorage, LogStorage |
||||
|
||||
|
||||
class JMSReplayStorage(ObjectStorage):
    """Replay storage that pushes replays to the JumpServer core service."""

    def __init__(self, config):
        self.client = config.get("SERVICE")

    def upload(self, src, target):
        """Push the replay file; the session id is the file name stem."""
        session_id = os.path.basename(target).split('.')[0]
        ok = self.client.push_session_replay(src, session_id)
        return ok, None

    def delete(self, path):
        # Fix: the original message was garbled ("Not support not").
        return False, Exception("Not supported yet")

    def exists(self, path):
        # The core service offers no existence check.
        return False

    def download(self, src, target):
        # Fix: the original message was garbled ("Not support not").
        return False, Exception("Not supported yet")

    @property
    def type(self):
        return 'jms'
||||
|
||||
|
||||
class JMSCommandStorage(LogStorage):
    """Command storage that pushes commands to the JumpServer core service."""

    def __init__(self, config):
        self.client = config.get("SERVICE")
        if not self.client:
            raise Exception("Not found app service")

    def save(self, command):
        # A single command is pushed as a one-element batch.
        return self.bulk_save([command])

    def bulk_save(self, command_set, raise_on_error=True):
        # NOTE: raise_on_error is accepted for interface compatibility but
        # not used by the core-service client.
        return self.client.push_session_command(command_set)

    def filter(self, date_from=None, date_to=None,
               user=None, asset=None, account=None,
               input=None, session=None):
        # Querying is not supported through the core service API.
        pass

    def count(self, date_from=None, date_to=None,
              user=None, asset=None, account=None,
              input=None, session=None):
        # Counting is not supported through the core service API.
        pass
@ -0,0 +1,77 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
|
||||
from .base import ObjectStorage, LogStorage |
||||
|
||||
|
||||
class MultiObjectStorage(ObjectStorage):
    """Fan-out storage that mirrors operations across several backends."""

    def __init__(self, configs):
        self.configs = configs
        self.storage_list = []
        self.init_storage_list()

    def init_storage_list(self):
        # Imported here to avoid a circular import with the package root.
        from . import get_object_storage
        if isinstance(self.configs, dict):
            configs = self.configs.values()
        else:
            configs = self.configs

        for config in configs:
            try:
                storage = get_object_storage(config)
                self.storage_list.append(storage)
            except Exception:
                # One broken backend config must not prevent the others
                # from being initialized.
                pass

    def upload(self, src, target):
        """Upload to every backend; returns parallel lists of oks and errors."""
        success = []
        msg = []
        for storage in self.storage_list:
            ok, err = storage.upload(src, target)
            success.append(ok)
            msg.append(err)
        return success, msg

    def download(self, src, target):
        """Download from the first backend that has the file."""
        success = False
        msg = None
        for storage in self.storage_list:
            try:
                if not storage.exists(src):
                    continue
                ok, msg = storage.download(src, target)
                if ok:
                    success = True
                    msg = ''
                    break
            except Exception:  # narrowed from a bare except; try next backend
                pass
        return success, msg

    def delete(self, path):
        """Delete from every backend holding the file; False if any failed."""
        success = True
        msg = None
        for storage in self.storage_list:
            try:
                if storage.exists(path):
                    ok, msg = storage.delete(path)
                    if not ok:
                        success = False
            except Exception:  # narrowed from a bare except
                pass
        return success, msg

    def exists(self, path):
        """True if any backend has the file."""
        for storage in self.storage_list:
            try:
                if storage.exists(path):
                    return True
            except Exception:  # narrowed from a bare except
                pass
        return False
@ -0,0 +1,70 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
import os |
||||
|
||||
from obs.client import ObsClient |
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class OBSStorage(ObjectStorage):
    """Replay storage backed by Huawei OBS."""

    def __init__(self, config):
        self.endpoint = config.get("ENDPOINT", None)
        self.bucket = config.get("BUCKET", None)
        self.access_key = config.get("ACCESS_KEY", None)
        self.secret_key = config.get("SECRET_KEY", None)
        if self.access_key and self.secret_key and self.endpoint:
            # Proxy settings are taken from the process environment.
            self.obsClient = ObsClient(
                access_key_id=self.access_key,
                secret_access_key=self.secret_key,
                server=self.endpoint,
                proxy_host=os.getenv("proxy_host"),
                proxy_port=os.getenv("proxy_port"),
                proxy_username=os.getenv("proxy_username"),
                proxy_password=os.getenv("proxy_password"),
            )
        else:
            self.obsClient = None

    def upload(self, src, target):
        """Upload local file *src* as object *target*; returns (ok, error)."""
        try:
            resp = self.obsClient.putFile(self.bucket, target, src)
        except Exception as e:
            return False, e
        if resp.status < 300:
            return True, None
        return False, resp.reason

    def exists(self, path):
        # OBS signals existence via the metadata call's HTTP status.
        resp = self.obsClient.getObjectMetadata(self.bucket, path)
        return resp.status < 300

    def delete(self, path):
        """Delete object *path*; returns (ok, error)."""
        try:
            resp = self.obsClient.deleteObject(self.bucket, path)
        except Exception as e:
            return False, e
        if resp.status < 300:
            return True, None
        return False, resp.reason

    def download(self, src, target):
        """Download object *src* to local *target*; returns (ok, error)."""
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            resp = self.obsClient.getObject(self.bucket, src, target)
        except Exception as e:
            return False, e
        if resp.status < 300:
            return True, None
        return False, resp.reason

    def list_buckets(self):
        """Return bucket names; raises RuntimeError on an error response."""
        resp = self.obsClient.listBuckets()
        if resp.status >= 300:
            raise RuntimeError(resp.status, str(resp.reason))
        return [b.name for b in resp.body.buckets]

    @property
    def type(self):
        return 'obs'
@ -0,0 +1,72 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
import os |
||||
import time |
||||
|
||||
import oss2 |
||||
|
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class OSSStorage(ObjectStorage):
    """Replay storage backed by Alibaba Cloud OSS (oss2 SDK)."""

    def __init__(self, config):
        self.endpoint = config.get("ENDPOINT", None)
        self.bucket = config.get("BUCKET", None)
        self.access_key = config.get("ACCESS_KEY", None)
        self.secret_key = config.get("SECRET_KEY", None)
        # client stays None when credentials/endpoint/bucket are incomplete;
        # operations will then fail with AttributeError inside their
        # try/except blocks.
        if self.access_key and self.secret_key:
            self.auth = oss2.Auth(self.access_key, self.secret_key)
        else:
            self.auth = None
        if self.auth and self.endpoint and self.bucket:
            self.client = oss2.Bucket(self.auth, self.endpoint, self.bucket)
        else:
            self.client = None

    def upload(self, src, target):
        """Upload local file *src* as object *target*; returns (ok, error)."""
        try:
            self.client.put_object_from_file(target, src)
            return True, None
        except Exception as e:
            return False, e

    def exists(self, path):
        """Whether object *path* exists; any SDK error reads as absent."""
        try:
            return self.client.object_exists(path)
        except Exception as e:
            return False

    def delete(self, path):
        """Delete object *path*; returns (ok, error)."""
        try:
            self.client.delete_object(path)
            return True, None
        except Exception as e:
            return False, e

    def restore(self, path):
        """Restore an archived object and block until it is readable.

        NOTE(review): polls every 5s with no upper bound, and assumes the
        'x-oss-storage-class'/'x-oss-restore' headers are present — confirm
        this cannot loop forever on an unexpected response.
        """
        meta = self.client.head_object(path)
        if meta.resp.headers['x-oss-storage-class'] == oss2.BUCKET_STORAGE_CLASS_ARCHIVE:
            self.client.restore_object(path)
            while True:
                meta = self.client.head_object(path)
                if meta.resp.headers['x-oss-restore'] == 'ongoing-request="true"':
                    time.sleep(5)
                else:
                    break

    def download(self, src, target):
        """Download object *src* to local *target*; returns (ok, error)."""
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            # Archived objects must be restored before they can be read.
            self.restore(src)
            self.client.get_object_to_file(src, target)
            return True, None
        except Exception as e:
            return False, e

    def list_buckets(self):
        """Return the names of all buckets reachable with this auth."""
        service = oss2.Service(self.auth,self.endpoint)
        return ([b.name for b in oss2.BucketIterator(service)])

    @property
    def type(self):
        return 'oss'
@ -0,0 +1,74 @@
|
||||
# -*- coding: utf-8 -*- |
||||
# |
||||
import boto3 |
||||
import os |
||||
|
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class S3Storage(ObjectStorage):
    """Replay storage for S3-compatible services (AWS, Ceph RGW, Swift, COS)."""

    def __init__(self, config):
        self.bucket = config.get("BUCKET", "jumpserver")
        self.region = config.get("REGION", None)
        self.access_key = config.get("ACCESS_KEY", None)
        self.secret_key = config.get("SECRET_KEY", None)
        self.endpoint = config.get("ENDPOINT", None)

        try:
            self.client = boto3.client(
                's3', region_name=self.region,
                aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key,
                endpoint_url=self.endpoint
            )
        except ValueError:
            pass

    def upload(self, src, target):
        """Upload local file *src* to key *target*; returns (ok, error)."""
        try:
            self.client.upload_file(Filename=src, Bucket=self.bucket, Key=target)
        except Exception as e:
            return False, e
        return True, None

    def exists(self, path):
        # head_object raises for a missing key; treat any failure as absent.
        try:
            self.client.head_object(Bucket=self.bucket, Key=path)
        except Exception:
            return False
        return True

    def download(self, src, target):
        """Download key *src* to local path *target*; returns (ok, error)."""
        try:
            os.makedirs(os.path.dirname(target), 0o755, exist_ok=True)
            self.client.download_file(self.bucket, src, target)
        except Exception as e:
            return False, e
        return True, None

    def delete(self, path):
        """Delete key *path*; returns (ok, error)."""
        try:
            self.client.delete_object(Bucket=self.bucket, Key=path)
        except Exception as e:
            return False, e
        return True, None

    def generate_presigned_url(self, path, expire=3600):
        """Return (url, None) for a temporary GET link, or (False, error)."""
        try:
            url = self.client.generate_presigned_url(
                ClientMethod='get_object',
                Params={'Bucket': self.bucket, 'Key': path},
                ExpiresIn=expire,
                HttpMethod='GET')
        except Exception as e:
            return False, e
        return url, None

    def list_buckets(self):
        """Return the names of all buckets visible to these credentials."""
        response = self.client.list_buckets()
        buckets = response.get('Buckets', [])
        return [b['Name'] for b in buckets if b.get('Name')]

    @property
    def type(self):
        return 's3'
@ -0,0 +1,109 @@
|
||||
# -*- coding: utf-8 -*- |
||||
import io |
||||
import os |
||||
|
||||
import paramiko |
||||
|
||||
from .base import ObjectStorage |
||||
|
||||
|
||||
class SFTPStorage(ObjectStorage):
    """Object storage backend that stores files on a remote host via SFTP.

    Most methods return an ``(ok, error)`` tuple instead of raising, so
    callers can report failures uniformly.
    """

    def __init__(self, config):
        """Open an SSH/SFTP session from a config mapping.

        :param config: mapping with SFTP_HOST, SFTP_PORT, SFTP_USERNAME,
            SFTP_PASSWORD, SFTP_ROOT_PATH and related secret settings.
        """
        self.sftp = None
        self.sftp_host = config.get('SFTP_HOST', None)
        self.sftp_port = int(config.get('SFTP_PORT', 22))
        self.sftp_username = config.get('SFTP_USERNAME', '')
        # NOTE(review): the 'STP_*' keys below look like typos of 'SFTP_*',
        # but they may be the key names actually written by the settings
        # layer — confirm against the producing config before renaming.
        self.sftp_secret_type = config.get('STP_SECRET_TYPE', 'password')
        self.sftp_password = config.get('SFTP_PASSWORD', '')
        self.sftp_private_key = config.get('STP_PRIVATE_KEY', '')
        self.sftp_passphrase = config.get('STP_PASSPHRASE', '')
        self.sftp_root_path = config.get('SFTP_ROOT_PATH', '/tmp')
        self.ssh = paramiko.SSHClient()
        self.connect()

    def connect(self):
        """(Re)establish the SSH connection and open an SFTP channel."""
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        if self.sftp_secret_type == 'password':
            self.ssh.connect(self.sftp_host, self.sftp_port, self.sftp_username, self.sftp_password)
        elif self.sftp_secret_type == 'ssh_key':
            pkey = paramiko.RSAKey.from_private_key(io.StringIO(self.sftp_private_key))
            self.ssh.connect(self.sftp_host, self.sftp_port, self.sftp_username, pkey=pkey,
                             passphrase=self.sftp_passphrase)
        self.sftp = self.ssh.open_sftp()

    def confirm_connected(self):
        """Reconnect if the SFTP session has gone stale."""
        try:
            # Cheap no-op request used purely as a liveness probe.
            self.sftp.getcwd()
        except Exception:
            self.connect()

    def _make_remote_dirs(self, remote_dir):
        """Create ``remote_dir`` and any missing parents (like os.makedirs)."""
        if not remote_dir or remote_dir == '/' or self.exists(remote_dir):
            return
        self._make_remote_dirs(os.path.dirname(remote_dir))
        self.sftp.mkdir(remote_dir)

    def upload(self, src, target):
        """Upload local file ``src`` under the root path. Returns (ok, error)."""
        local_file = src
        remote_file = os.path.join(self.sftp_root_path, target)
        try:
            self.confirm_connected()
            mode = os.stat(local_file).st_mode
            # Create all missing parent directories, not just the last one,
            # so nested targets do not fail.
            self._make_remote_dirs(os.path.dirname(remote_file))
            self.sftp.put(local_file, remote_file)
            # Preserve the local file's permission bits on the remote copy.
            self.sftp.chmod(remote_file, mode)
            return True, None
        except Exception as e:
            return False, e

    def download(self, src, target):
        """Download remote file ``src`` to local path ``target``. Returns (ok, error)."""
        remote_file = src
        local_file = target
        self.confirm_connected()
        try:
            local_dir = os.path.dirname(local_file)
            if not os.path.exists(local_dir):
                os.makedirs(local_dir)
            mode = self.sftp.stat(remote_file).st_mode
            self.sftp.get(remote_file, local_file)
            # Preserve the remote file's permission bits locally.
            os.chmod(local_file, mode)
            return True, None
        except Exception as e:
            return False, e

    def delete(self, path):
        """Remove ``path`` (relative to root). Raises FileNotFoundError if absent."""
        path = os.path.join(self.sftp_root_path, path)
        self.confirm_connected()
        if not self.exists(path):
            raise FileNotFoundError('File not exist error(%s)' % path)
        try:
            self.sftp.remove(path)
            return True, None
        except Exception as e:
            return False, e

    def check_dir_exist(self, d):
        """Return True if remote path ``d`` exists."""
        self.confirm_connected()
        try:
            self.sftp.stat(d)
            return True
        except Exception:
            return False

    def mkdir(self, dirs):
        """Create remote directory ``dirs`` if missing. Returns (ok, error)."""
        self.confirm_connected()
        try:
            if not self.exists(dirs):
                self.sftp.mkdir(dirs)
            return True, None
        except Exception as e:
            return False, e

    def exists(self, target):
        """Return True if remote path ``target`` exists."""
        self.confirm_connected()
        try:
            self.sftp.stat(target)
            return True
        except Exception:
            return False

    def close(self):
        """Close the SFTP channel and the underlying SSH connection."""
        self.sftp.close()
        self.ssh.close()
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,25 @@
|
||||
# -*- coding: utf-8 -*- |
||||
from rest_framework.decorators import action |
||||
from rest_framework.response import Response |
||||
|
||||
from common.api.generic import JMSModelViewSet |
||||
from common.const.http import OPTIONS, GET |
||||
from common.permissions import IsValidUser |
||||
from ..models import Variable |
||||
from ..serializers import VariableSerializer, VariableFormDataSerializer |
||||
|
||||
__all__ = [ |
||||
'VariableViewSet' |
||||
] |
||||
|
||||
|
||||
class VariableViewSet(JMSModelViewSet):
    """Read-only API over Variable objects (GET/OPTIONS only)."""

    queryset = Variable.objects.all()
    serializer_class = VariableSerializer
    http_method_names = ['options', 'get']

    @action(
        methods=[GET],
        detail=False,
        serializer_class=VariableFormDataSerializer,
        permission_classes=[IsValidUser, ],
        url_path='form_data',
    )
    def form_data(self, request, *args, **kwargs):
        # Exists only so the serializer's field metadata is exposed
        # dynamically; the response body is intentionally empty.
        return Response({})
@ -0,0 +1,28 @@
|
||||
# Generated by Django 4.1.13 on 2024-10-21 08:02 |
||||
from django.conf import settings |
||||
from django.db import migrations, models |
||||
|
||||
|
||||
class Migration(migrations.Migration):
    # Auto-generated by Django 4.1.13 on 2024-10-21: extends the ops Job
    # model with node targeting and relaxes the uniqueness constraint.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('assets', '0006_database_pg_ssl_mode'),
        ('ops', '0003_alter_adhoc_unique_together_and_more'),
    ]

    operations = [
        # New many-to-many: jobs can now target asset-tree nodes directly.
        migrations.AddField(
            model_name='job',
            name='nodes',
            field=models.ManyToManyField(blank=True, to='assets.node', verbose_name='Node'),
        ),
        # Assets become optional (blank=True) since nodes may be used instead.
        migrations.AlterField(
            model_name='job',
            name='assets',
            field=models.ManyToManyField(blank=True, to='assets.asset', verbose_name='Assets'),
        ),
        # Uniqueness now also keyed by job type, allowing same-named jobs
        # of different types per creator/org.
        migrations.AlterUniqueTogether(
            name='job',
            unique_together={('name', 'org_id', 'creator', 'type')},
        ),
    ]
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue