mirror of https://github.com/jumpserver/jumpserver
Merge branch 'new_api' of github.com:jumpserver/jumpserver into new_api
commit
14de3ba5de
|
@ -0,0 +1,7 @@
|
|||
.git
|
||||
logs/*
|
||||
data/*
|
||||
.github
|
||||
tmp/*
|
||||
django.db
|
||||
celerybeat.pid
|
|
@ -26,3 +26,7 @@ jumpserver.iml
|
|||
tmp/*
|
||||
sessions/*
|
||||
media
|
||||
celerybeat.pid
|
||||
django.db
|
||||
celerybeat-schedule.db
|
||||
static
|
||||
|
|
|
@ -2,6 +2,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
pass
|
||||
__version__ = "0.5.0"
|
||||
|
|
|
@ -25,9 +25,9 @@ from .hands import IsSuperUser, IsValidUser, IsSuperUserOrAppUser, \
|
|||
get_user_granted_assets
|
||||
from .models import AssetGroup, Asset, Cluster, SystemUser, AdminUser
|
||||
from . import serializers
|
||||
from .tasks import update_assets_hardware_info, test_admin_user_connectability, \
|
||||
test_admin_user_connectability_manual, push_system_user_to_cluster_assets, \
|
||||
test_system_user_connectability
|
||||
from .tasks import update_assets_hardware_info_manual, test_admin_user_connectability_util, \
|
||||
test_asset_connectability_manual, push_system_user_to_cluster_assets_manual, \
|
||||
test_system_user_connectability_manual
|
||||
|
||||
|
||||
class AssetViewSet(IDInFilterMixin, BulkModelViewSet):
|
||||
|
@ -222,7 +222,7 @@ class AssetRefreshHardwareApi(generics.RetrieveAPIView):
|
|||
def retrieve(self, request, *args, **kwargs):
|
||||
asset_id = kwargs.get('pk')
|
||||
asset = get_object_or_404(Asset, pk=asset_id)
|
||||
summary = update_assets_hardware_info([asset])
|
||||
summary = update_assets_hardware_info_manual([asset])[1]
|
||||
if summary.get('dark'):
|
||||
return Response(summary['dark'].values(), status=501)
|
||||
else:
|
||||
|
@ -239,7 +239,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
|
|||
def retrieve(self, request, *args, **kwargs):
|
||||
asset_id = kwargs.get('pk')
|
||||
asset = get_object_or_404(Asset, pk=asset_id)
|
||||
ok, msg = test_admin_user_connectability_manual(asset)
|
||||
ok, msg = test_asset_connectability_manual(asset)
|
||||
if ok:
|
||||
return Response({"msg": "pong"})
|
||||
else:
|
||||
|
@ -255,7 +255,7 @@ class AdminUserTestConnectiveApi(generics.RetrieveAPIView):
|
|||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
admin_user = self.get_object()
|
||||
test_admin_user_connectability.delay(admin_user, force=True)
|
||||
test_admin_user_connectability_util.delay(admin_user)
|
||||
return Response({"msg": "Task created"})
|
||||
|
||||
|
||||
|
@ -268,7 +268,7 @@ class SystemUserPushApi(generics.RetrieveAPIView):
|
|||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
system_user = self.get_object()
|
||||
push_system_user_to_cluster_assets.delay(system_user, force=True)
|
||||
push_system_user_to_cluster_assets_manual.delay(system_user)
|
||||
return Response({"msg": "Task created"})
|
||||
|
||||
|
||||
|
@ -281,5 +281,5 @@ class SystemUserTestConnectiveApi(generics.RetrieveAPIView):
|
|||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
system_user = self.get_object()
|
||||
test_system_user_connectability.delay(system_user, force=True)
|
||||
test_system_user_connectability_manual.delay(system_user)
|
||||
return Response({"msg": "Task created"})
|
||||
|
|
|
@ -2,14 +2,20 @@
|
|||
#
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
PUSH_SYSTEM_USER_PERIOD_LOCK_KEY = "PUSH_SYSTEM_USER_PERIOD_KEY"
|
||||
PUSH_SYSTEM_USER_PERIOD_TASK_NAME = _("PUSH SYSTEM USER TO CLUSTER PERIOD TASK")
|
||||
# PUSH_SYSTEM_USER_PERIOD_LOCK_KEY = "PUSH_SYSTEM_USER_PERIOD_KEY"
|
||||
PUSH_SYSTEM_USER_PERIOD_TASK_NAME = _("PUSH SYSTEM USER TO CLUSTER PERIOD: {}")
|
||||
PUSH_SYSTEM_USER_MANUAL_TASK_NAME = _("PUSH SYSTEM USER TO CLUSTER MANUALLY: {}")
|
||||
PUSH_SYSTEM_USER_TASK_NAME = _("PUSH SYSTEM USER TO CLUSTER: {}")
|
||||
PUSH_SYSTEM_USER_LOCK_KEY = "PUSH_SYSTEM_USER_TO_CLUSTER_LOCK_{}"
|
||||
# PUSH_SYSTEM_USER_LOCK_KEY = "PUSH_SYSTEM_USER_TO_CLUSTER_LOCK_{}"
|
||||
PUSH_SYSTEM_USER_ON_CHANGE_TASK_NAME = _("PUSH SYSTEM USER ON CHANGE: {}")
|
||||
PUSH_SYSTEM_USER_ON_CREATE_TASK_NAME = _("PUSH SYSTEM USER ON CREATE: {}")
|
||||
PUSH_SYSTEM_USERS_ON_ASSET_CREATE_TASK_NAME = _("PUSH SYSTEM USERS ON ASSET CREAT: {}")
|
||||
|
||||
|
||||
UPDATE_ASSETS_HARDWARE_TASK_NAME = _('UPDATE ASSETS HARDWARE INFO')
|
||||
UPDATE_ASSETS_HARDWARE_PERIOD_LOCK_KEY = "UPDATE_ASSETS_HARDWARE_PERIOD_LOCK_KEY"
|
||||
UPDATE_ASSETS_HARDWARE_MANUAL_TASK_NAME = _('UPDATE ASSETS HARDWARE INFO MANUALLY')
|
||||
UPDATE_ASSETS_HARDWARE_ON_CREATE_TASK_NAME = _('UPDATE ASSETS HARDWARE INFO ON CREATE')
|
||||
# UPDATE_ASSETS_HARDWARE_PERIOD_LOCK_KEY = "UPDATE_ASSETS_HARDWARE_PERIOD_LOCK_KEY"
|
||||
UPDATE_ASSETS_HARDWARE_PERIOD_TASK_NAME = _('UPDATE ASSETS HARDWARE INFO PERIOD')
|
||||
UPDATE_ASSETS_HARDWARE_TASKS = [
|
||||
{
|
||||
|
@ -20,10 +26,10 @@ UPDATE_ASSETS_HARDWARE_TASKS = [
|
|||
}
|
||||
]
|
||||
|
||||
TEST_ADMIN_USER_CONN_PERIOD_LOCK_KEY = "TEST_ADMIN_USER_CONN_PERIOD_KEY"
|
||||
TEST_ADMIN_USER_CONN_PERIOD_TASK_NAME = _("TEST ADMIN USER CONN PERIOD TASK")
|
||||
# TEST_ADMIN_USER_CONN_PERIOD_LOCK_KEY = "TEST_ADMIN_USER_CONN_PERIOD_KEY"
|
||||
TEST_ADMIN_USER_CONN_PERIOD_TASK_NAME = _("TEST ADMIN USER CONN PERIOD: {}")
|
||||
TEST_ADMIN_USER_CONN_MANUAL_TASK_NAME = _("TEST ADMIN USER CONN MANUALLY: {}")
|
||||
TEST_ADMIN_USER_CONN_TASK_NAME = _("TEST ADMIN USER CONN: {}")
|
||||
TEST_ADMIN_USER_CONN_LOCK_KEY = TEST_ADMIN_USER_CONN_TASK_NAME
|
||||
ADMIN_USER_CONN_CACHE_KEY = "ADMIN_USER_CONN_{}"
|
||||
TEST_ADMIN_USER_CONN_TASKS = [
|
||||
{
|
||||
|
@ -38,10 +44,8 @@ ASSET_ADMIN_CONN_CACHE_KEY = "ASSET_ADMIN_USER_CONN_{}"
|
|||
TEST_ASSET_CONN_TASK_NAME = _("ASSET CONN TEST MANUAL")
|
||||
|
||||
TEST_SYSTEM_USER_CONN_PERIOD_LOCK_KEY = "TEST_SYSTEM_USER_CONN_PERIOD_KEY"
|
||||
TEST_SYSTEM_USER_CONN_PERIOD_TASK_NAME = _("TEST SYSTEM USER CONN PERIOD TASK")
|
||||
TEST_SYSTEM_USER_CONN_CACHE_KEY_PREFIX = "SYSTEM_USER_CONN_"
|
||||
TEST_SYSTEM_USER_CONN_TASK_NAME = _("TEST SYSTEM USER CONN: {}")
|
||||
TEST_SYSTEM_USER_CONN_LOCK_KEY = "TEST_SYSTEM_USER_CONN_{}"
|
||||
TEST_SYSTEM_USER_CONN_PERIOD_TASK_NAME = _("TEST SYSTEM USER CONN PERIOD: {}")
|
||||
TEST_SYSTEM_USER_CONN_MANUAL_TASK_NAME = _("TEST SYSTEM USER CONN MANUALLY: {}")
|
||||
SYSTEM_USER_CONN_CACHE_KEY = "SYSTEM_USER_CONN_{}"
|
||||
TEST_SYSTEM_USER_CONN_TASKS = [
|
||||
{
|
||||
|
|
|
@ -224,13 +224,16 @@ class SystemUserForm(forms.ModelForm):
|
|||
password = self.cleaned_data.get('password', None)
|
||||
private_key_file = self.cleaned_data.get('private_key_file')
|
||||
auto_generate_key = self.cleaned_data.get('auto_generate_key')
|
||||
private_key = None
|
||||
public_key = None
|
||||
|
||||
if auto_generate_key:
|
||||
logger.info('Auto set system user auth')
|
||||
system_user.auto_gen_auth()
|
||||
else:
|
||||
private_key = private_key_file.read().strip().decode('utf-8')
|
||||
public_key = ssh_pubkey_gen(private_key=private_key)
|
||||
if private_key_file:
|
||||
private_key = private_key_file.read().strip().decode('utf-8')
|
||||
public_key = ssh_pubkey_gen(private_key=private_key)
|
||||
system_user.set_auth(password=password, private_key=private_key, public_key=public_key)
|
||||
return system_user
|
||||
|
||||
|
|
|
@ -0,0 +1,168 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11 on 2017-12-21 16:06
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import assets.models.utils
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
def add_default_group(apps, schema_editor):
|
||||
group_model = apps.get_model("assets", "AssetGroup")
|
||||
db_alias = schema_editor.connection.alias
|
||||
group_model.objects.using(db_alias).create(
|
||||
name="Default"
|
||||
)
|
||||
|
||||
|
||||
def add_default_cluster(apps, schema_editor):
|
||||
cluster_model = apps.get_model("assets", "Cluster")
|
||||
db_alias = schema_editor.connection.alias
|
||||
cluster_model.objects.using(db_alias).create(
|
||||
name="Default"
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AdminUser',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
|
||||
('username', models.CharField(max_length=16, verbose_name='Username')),
|
||||
('_password', models.CharField(blank=True, max_length=256, null=True, verbose_name='Password')),
|
||||
('_private_key', models.TextField(blank=True, max_length=4096, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key')),
|
||||
('_public_key', models.TextField(blank=True, max_length=4096, verbose_name='SSH public key')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('date_updated', models.DateTimeField(auto_now=True)),
|
||||
('created_by', models.CharField(max_length=32, null=True, verbose_name='Created by')),
|
||||
('become', models.BooleanField(default=True)),
|
||||
('become_method', models.CharField(choices=[('sudo', 'sudo'), ('su', 'su')], default='sudo', max_length=4)),
|
||||
('become_user', models.CharField(default='root', max_length=64)),
|
||||
('_become_pass', models.CharField(default='', max_length=128)),
|
||||
],
|
||||
options={
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Asset',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('ip', models.GenericIPAddressField(db_index=True, verbose_name='IP')),
|
||||
('hostname', models.CharField(max_length=128, unique=True, verbose_name='Hostname')),
|
||||
('port', models.IntegerField(default=22, verbose_name='Port')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
|
||||
('type', models.CharField(blank=True, choices=[('Server', 'Server'), ('VM', 'VM'), ('Switch', 'Switch'), ('Router', 'Router'), ('Firewall', 'Firewall'), ('Storage', 'Storage')], default='Server', max_length=16, null=True, verbose_name='Asset type')),
|
||||
('env', models.CharField(blank=True, choices=[('Prod', 'Production'), ('Dev', 'Development'), ('Test', 'Testing')], default='Prod', max_length=8, null=True, verbose_name='Asset environment')),
|
||||
('status', models.CharField(blank=True, choices=[('In use', 'In use'), ('Out of use', 'Out of use')], default='In use', max_length=12, null=True, verbose_name='Asset status')),
|
||||
('public_ip', models.GenericIPAddressField(blank=True, null=True, verbose_name='Public IP')),
|
||||
('remote_card_ip', models.CharField(blank=True, max_length=16, null=True, verbose_name='Remote control card IP')),
|
||||
('cabinet_no', models.CharField(blank=True, max_length=32, null=True, verbose_name='Cabinet number')),
|
||||
('cabinet_pos', models.IntegerField(blank=True, null=True, verbose_name='Cabinet position')),
|
||||
('number', models.CharField(blank=True, max_length=32, null=True, verbose_name='Asset number')),
|
||||
('vendor', models.CharField(blank=True, max_length=64, null=True, verbose_name='Vendor')),
|
||||
('model', models.CharField(blank=True, max_length=54, null=True, verbose_name='Model')),
|
||||
('sn', models.CharField(blank=True, max_length=128, null=True, verbose_name='Serial number')),
|
||||
('cpu_model', models.CharField(blank=True, max_length=64, null=True, verbose_name='CPU model')),
|
||||
('cpu_count', models.IntegerField(null=True, verbose_name='CPU count')),
|
||||
('cpu_cores', models.IntegerField(null=True, verbose_name='CPU cores')),
|
||||
('memory', models.CharField(blank=True, max_length=64, null=True, verbose_name='Memory')),
|
||||
('disk_total', models.CharField(blank=True, max_length=1024, null=True, verbose_name='Disk total')),
|
||||
('disk_info', models.CharField(blank=True, max_length=1024, null=True, verbose_name='Disk info')),
|
||||
('platform', models.CharField(blank=True, max_length=128, null=True, verbose_name='Platform')),
|
||||
('os', models.CharField(blank=True, max_length=128, null=True, verbose_name='OS')),
|
||||
('os_version', models.CharField(blank=True, max_length=16, null=True, verbose_name='OS version')),
|
||||
('os_arch', models.CharField(blank=True, max_length=16, null=True, verbose_name='OS arch')),
|
||||
('hostname_raw', models.CharField(blank=True, max_length=128, null=True, verbose_name='Hostname raw')),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('comment', models.TextField(blank=True, default='', max_length=128, verbose_name='Comment')),
|
||||
('admin_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='assets.AdminUser', verbose_name='Admin user')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AssetGroup',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=64, unique=True, verbose_name='Name')),
|
||||
('created_by', models.CharField(blank=True, max_length=32, verbose_name='Created by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Cluster',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=32, verbose_name='Name')),
|
||||
('bandwidth', models.CharField(blank=True, max_length=32, verbose_name='Bandwidth')),
|
||||
('contact', models.CharField(blank=True, max_length=128, verbose_name='Contact')),
|
||||
('phone', models.CharField(blank=True, max_length=32, verbose_name='Phone')),
|
||||
('address', models.CharField(blank=True, max_length=128, verbose_name='Address')),
|
||||
('intranet', models.TextField(blank=True, verbose_name='Intranet')),
|
||||
('extranet', models.TextField(blank=True, verbose_name='Extranet')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('operator', models.CharField(blank=True, max_length=32, verbose_name='Operator')),
|
||||
('created_by', models.CharField(blank=True, max_length=32, verbose_name='Created by')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('admin_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='assets.AdminUser', verbose_name='Admin user')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SystemUser',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
|
||||
('username', models.CharField(max_length=16, verbose_name='Username')),
|
||||
('_password', models.CharField(blank=True, max_length=256, null=True, verbose_name='Password')),
|
||||
('_private_key', models.TextField(blank=True, max_length=4096, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key')),
|
||||
('_public_key', models.TextField(blank=True, max_length=4096, verbose_name='SSH public key')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('date_updated', models.DateTimeField(auto_now=True)),
|
||||
('created_by', models.CharField(max_length=32, null=True, verbose_name='Created by')),
|
||||
('priority', models.IntegerField(default=10, verbose_name='Priority')),
|
||||
('protocol', models.CharField(choices=[('ssh', 'ssh')], default='ssh', max_length=16, verbose_name='Protocol')),
|
||||
('auto_push', models.BooleanField(default=True, verbose_name='Auto push')),
|
||||
('sudo', models.TextField(default='/sbin/ifconfig', verbose_name='Sudo')),
|
||||
('shell', models.CharField(default='/bin/bash', max_length=64, verbose_name='Shell')),
|
||||
('cluster', models.ManyToManyField(blank=True, to='assets.Cluster', verbose_name='Cluster')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='asset',
|
||||
name='cluster',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assets', to='assets.Cluster', verbose_name='Cluster'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='asset',
|
||||
name='groups',
|
||||
field=models.ManyToManyField(blank=True, related_name='assets', to='assets.AssetGroup', verbose_name='Asset groups'),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='asset',
|
||||
unique_together=set([('ip', 'port')]),
|
||||
),
|
||||
|
||||
migrations.RunPython(add_default_cluster),
|
||||
migrations.RunPython(add_default_group),
|
||||
]
|
|
@ -13,13 +13,14 @@ from django.db import models
|
|||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
|
||||
from common.utils import signer, ssh_key_string_to_obj, ssh_key_gen
|
||||
from common.utils import get_signer, ssh_key_string_to_obj, ssh_key_gen
|
||||
from .utils import private_key_validator
|
||||
from ..const import SYSTEM_USER_CONN_CACHE_KEY
|
||||
|
||||
|
||||
__all__ = ['AdminUser', 'SystemUser',]
|
||||
logger = logging.getLogger(__name__)
|
||||
signer = get_signer()
|
||||
|
||||
|
||||
class AssetUser(models.Model):
|
||||
|
|
|
@ -8,10 +8,11 @@ from django.db.models.signals import post_save
|
|||
|
||||
from common.utils import get_object_or_none, capacity_convert, \
|
||||
sum_capacity, encrypt_password, get_logger
|
||||
from common.celery import app as celery_app
|
||||
from common.celery import register_as_period_task, after_app_shutdown_clean, \
|
||||
after_app_ready_start, app as celery_app
|
||||
|
||||
from .models import SystemUser, AdminUser, Asset
|
||||
from . import const
|
||||
from .signals import on_app_ready
|
||||
|
||||
|
||||
FORKS = 10
|
||||
|
@ -20,7 +21,18 @@ logger = get_logger(__file__)
|
|||
CACHE_MAX_TIME = 60*60*60
|
||||
|
||||
|
||||
def _update_asset_info(result_raw):
|
||||
@shared_task
|
||||
def update_assets_hardware_info(result, **kwargs):
|
||||
"""
|
||||
Using ops task run result, to update asset info
|
||||
|
||||
@shared_task must be exit, because we using it as a task callback, is must
|
||||
be a celery task also
|
||||
:param result:
|
||||
:param kwargs: {task_name: ""}
|
||||
:return:
|
||||
"""
|
||||
result_raw = result[0]
|
||||
assets_updated = []
|
||||
for hostname, info in result_raw['ok'].items():
|
||||
if info:
|
||||
|
@ -66,171 +78,240 @@ def _update_asset_info(result_raw):
|
|||
|
||||
|
||||
@shared_task
|
||||
def update_assets_hardware_info(assets, task_name=None):
|
||||
def update_assets_hardware_info_util(assets, task_name):
|
||||
"""
|
||||
Using ansible api to update asset hardware info
|
||||
:param assets: asset seq
|
||||
:param task_name: task_name running
|
||||
:return: result summary ['contacted': {}, 'dark': {}]
|
||||
"""
|
||||
from ops.utils import create_or_update_task
|
||||
if task_name is None:
|
||||
task_name = const.UPDATE_ASSETS_HARDWARE_TASK_NAME
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
tasks = const.UPDATE_ASSETS_HARDWARE_TASKS
|
||||
hostname_list = [asset.hostname for asset in assets]
|
||||
task = create_or_update_task(
|
||||
task, _ = update_or_create_ansible_task(
|
||||
task_name, hosts=hostname_list, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
)
|
||||
result = task.run()
|
||||
summary, result_raw = result.results_summary, result.results_raw
|
||||
# TOdo: may be somewhere using
|
||||
assets_updated = _update_asset_info(result_raw)
|
||||
return summary
|
||||
# Todo: may be somewhere using
|
||||
# Manual run callback function
|
||||
assets_updated = update_assets_hardware_info(result)
|
||||
return result
|
||||
|
||||
|
||||
@shared_task
|
||||
def update_assets_hardware_period():
|
||||
def update_assets_hardware_info_manual(assets):
|
||||
task_name = const.UPDATE_ASSETS_HARDWARE_MANUAL_TASK_NAME
|
||||
return update_assets_hardware_info_util(assets, task_name)
|
||||
|
||||
|
||||
@receiver(post_save, sender=Asset, dispatch_uid="my_unique_identifier")
|
||||
def update_asset_info_on_created(sender, instance=None, created=False, **kwargs):
|
||||
if instance and created:
|
||||
msg = "Receive asset {} create signal, update asset hardware info".format(
|
||||
instance
|
||||
)
|
||||
logger.debug(msg)
|
||||
task_name = const.UPDATE_ASSETS_HARDWARE_ON_CREATE_TASK_NAME
|
||||
update_assets_hardware_info_util.delay([instance], task_name)
|
||||
|
||||
|
||||
@celery_app.task
|
||||
@register_as_period_task(interval=3600)
|
||||
@after_app_ready_start
|
||||
@after_app_shutdown_clean
|
||||
def update_assets_hardware_info_period():
|
||||
"""
|
||||
Update asset hardware period task
|
||||
:return:
|
||||
"""
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
task_name = const.UPDATE_ASSETS_HARDWARE_PERIOD_TASK_NAME
|
||||
if cache.get(const.UPDATE_ASSETS_HARDWARE_PERIOD_LOCK_KEY) == 1:
|
||||
msg = "Task {} is running or before long, passed this time".format(
|
||||
task_name
|
||||
)
|
||||
logger.debug(msg)
|
||||
return {}
|
||||
# Todo: set cache but not update, because we want also set it to as a
|
||||
# minimum update time too
|
||||
cache.set(const.UPDATE_ASSETS_HARDWARE_PERIOD_LOCK_KEY, 1, CACHE_MAX_TIME)
|
||||
assets = Asset.objects.filter(type__in=['Server', 'VM'])
|
||||
return update_assets_hardware_info(assets, task_name=task_name)
|
||||
hostname_list = [asset.hostname for asset in Asset.objects.all()]
|
||||
tasks = const.UPDATE_ASSETS_HARDWARE_TASKS
|
||||
|
||||
# Only create, schedule by celery beat
|
||||
_ = update_or_create_ansible_task(
|
||||
task_name, hosts=hostname_list, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
interval=60*60*24, is_periodic=True, callback=update_assets_hardware_info.name,
|
||||
)
|
||||
|
||||
|
||||
## ADMIN USER CONNECTIVE ##
|
||||
|
||||
@shared_task
|
||||
def test_admin_user_connectability(admin_user, force=False):
|
||||
"""
|
||||
Test asset admin user can connect or not. Using ansible api do that
|
||||
:param admin_user:
|
||||
:param force: Force update
|
||||
:return:
|
||||
"""
|
||||
from ops.utils import create_or_update_task
|
||||
def update_admin_user_connectability_info(result, **kwargs):
|
||||
admin_user = kwargs.get("admin_user")
|
||||
task_name = kwargs.get("task_name")
|
||||
if admin_user is None and task_name is not None:
|
||||
admin_user = task_name.split(":")[-1]
|
||||
|
||||
task_name = const.TEST_ADMIN_USER_CONN_TASK_NAME.format(admin_user.name)
|
||||
lock_key = const.TEST_ADMIN_USER_CONN_LOCK_KEY.format(admin_user.name)
|
||||
_, summary = result
|
||||
cache_key = const.ADMIN_USER_CONN_CACHE_KEY.format(admin_user)
|
||||
cache.set(cache_key, summary, CACHE_MAX_TIME)
|
||||
|
||||
if cache.get(lock_key, 0) == 1 and not force:
|
||||
logger.debug("Task {} is running or before along, passed this time")
|
||||
return {}
|
||||
|
||||
assets = admin_user.get_related_assets()
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = const.TEST_ADMIN_USER_CONN_TASKS
|
||||
task = create_or_update_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
)
|
||||
cache.set(lock_key, 1, CACHE_MAX_TIME)
|
||||
result = task.run()
|
||||
cache_key = const.ADMIN_USER_CONN_CACHE_KEY.format(admin_user.name)
|
||||
cache.set(cache_key, result.results_summary, CACHE_MAX_TIME)
|
||||
|
||||
for i in result.results_summary.get('contacted', []):
|
||||
for i in summary.get('contacted', []):
|
||||
asset_conn_cache_key = const.ASSET_ADMIN_CONN_CACHE_KEY.format(i)
|
||||
cache.set(asset_conn_cache_key, 1, CACHE_MAX_TIME)
|
||||
|
||||
for i, msg in result.results_summary.get('dark', {}).items():
|
||||
for i, msg in summary.get('dark', {}).items():
|
||||
asset_conn_cache_key = const.ASSET_ADMIN_CONN_CACHE_KEY.format(i)
|
||||
cache.set(asset_conn_cache_key, 0, CACHE_MAX_TIME)
|
||||
logger.error(msg)
|
||||
|
||||
return result.results_summary
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_admin_user_connectability_period():
|
||||
if cache.get(const.TEST_ADMIN_USER_CONN_PERIOD_LOCK_KEY) == 1:
|
||||
msg = "{} task is running or before long, passed this time".format(
|
||||
const.TEST_ADMIN_USER_CONN_PERIOD_TASK_NAME
|
||||
)
|
||||
logger.debug(msg)
|
||||
return
|
||||
def test_admin_user_connectability_util(admin_user, task_name):
|
||||
"""
|
||||
Test asset admin user can connect or not. Using ansible api do that
|
||||
:param admin_user:
|
||||
:param task_name:
|
||||
:param force: Force update
|
||||
:return:
|
||||
"""
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
|
||||
logger.debug("Task {} start".format(const.TEST_ADMIN_USER_CONN_TASK_NAME))
|
||||
cache.set(const.TEST_ADMIN_USER_CONN_PERIOD_LOCK_KEY, 1, CACHE_MAX_TIME)
|
||||
admin_users = AdminUser.objects.all()
|
||||
for admin_user in admin_users:
|
||||
test_admin_user_connectability(admin_user)
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_admin_user_connectability_manual(asset, task_name=None):
|
||||
from ops.utils import create_or_update_task
|
||||
if task_name is None:
|
||||
task_name = const.TEST_ASSET_CONN_TASK_NAME
|
||||
hosts = [asset.hostname]
|
||||
assets = admin_user.get_related_assets()
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = const.TEST_ADMIN_USER_CONN_TASKS
|
||||
task = create_or_update_task(
|
||||
task_name, tasks=tasks, hosts=hosts, run_as_admin=True,
|
||||
created_by='System', options=const.TASK_OPTIONS, pattern='all',
|
||||
task, created = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
)
|
||||
result = task.run()
|
||||
update_admin_user_connectability_info(result, admin_user=admin_user.name)
|
||||
return result
|
||||
|
||||
if result.results_summary['dark']:
|
||||
|
||||
@celery_app.task
|
||||
@register_as_period_task(interval=3600)
|
||||
@after_app_ready_start
|
||||
@after_app_shutdown_clean
|
||||
def test_admin_user_connectability_period():
|
||||
"""
|
||||
A period task that update the ansible task period
|
||||
"""
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
admin_users = AdminUser.objects.all()
|
||||
for admin_user in admin_users:
|
||||
task_name = const.TEST_ADMIN_USER_CONN_PERIOD_TASK_NAME.format(admin_user.name)
|
||||
assets = admin_user.get_related_assets()
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = const.TEST_ADMIN_USER_CONN_TASKS
|
||||
_ = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
interval=3600, is_periodic=True,
|
||||
callback=update_admin_user_connectability_info.name,
|
||||
)
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_admin_user_connectability_manual(admin_user):
|
||||
task_name = const.TEST_ADMIN_USER_CONN_MANUAL_TASK_NAME.format(admin_user.name)
|
||||
return test_admin_user_connectability_util.delay(admin_user, task_name)
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_asset_connectability_manual(asset):
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
|
||||
task_name = const.TEST_ASSET_CONN_TASK_NAME
|
||||
assets = [asset]
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = const.TEST_ADMIN_USER_CONN_TASKS
|
||||
task, created = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
)
|
||||
result = task.run()
|
||||
summary = result[1]
|
||||
if summary.get('dark'):
|
||||
cache.set(const.ASSET_ADMIN_CONN_CACHE_KEY.format(asset.hostname), 0, CACHE_MAX_TIME)
|
||||
return False, result.results_summary['dark']
|
||||
return False, summary['dark']
|
||||
else:
|
||||
cache.set(const.ASSET_ADMIN_CONN_CACHE_KEY.format(asset.hostname), 1, CACHE_MAX_TIME)
|
||||
return True, ""
|
||||
|
||||
|
||||
@receiver(post_save, sender=Asset, dispatch_uid="my_unique_identifier")
|
||||
def update_asset_conn_info_on_created(sender, instance=None, created=False,
|
||||
**kwargs):
|
||||
if instance and created:
|
||||
task_name = 'TEST-ASSET-CONN-WHEN-CREATED-{}'.format(instance)
|
||||
msg = "Receive asset {} create signal, test asset connectability".format(
|
||||
instance
|
||||
)
|
||||
logger.debug(msg)
|
||||
test_asset_connectability_manual.delay(instance, task_name)
|
||||
|
||||
|
||||
## System user connective ##
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_system_user_connectability(system_user, force=False):
|
||||
def update_system_user_connectablity_info(result, **kwargs):
|
||||
summary = result[1]
|
||||
task_name = kwargs.get("task_name")
|
||||
system_user = kwargs.get("system_user")
|
||||
if system_user is None:
|
||||
system_user = task_name.split(":")[-1]
|
||||
cache_key = const.SYSTEM_USER_CONN_CACHE_KEY.format(system_user)
|
||||
cache.set(cache_key, summary, CACHE_MAX_TIME)
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_system_user_connectability_util(system_user, task_name):
|
||||
"""
|
||||
Test system cant connect his assets or not.
|
||||
:param system_user:
|
||||
:param force
|
||||
:param task_name:
|
||||
:return:
|
||||
"""
|
||||
from ops.utils import create_or_update_task
|
||||
lock_key = const.TEST_SYSTEM_USER_CONN_LOCK_KEY.format(system_user.name)
|
||||
task_name = const.TEST_SYSTEM_USER_CONN_TASK_NAME.format(system_user.name)
|
||||
if cache.get(lock_key, 0) == 1 and not force:
|
||||
logger.debug("Task {} is running or before long, passed this time".format(task_name))
|
||||
return {}
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
assets = system_user.get_clusters_assets()
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = const.TEST_SYSTEM_USER_CONN_TASKS
|
||||
task = create_or_update_task(
|
||||
task, created = update_or_create_ansible_task(
|
||||
task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS,
|
||||
run_as=system_user.name, created_by="System",
|
||||
)
|
||||
cache.set(lock_key, 1, CACHE_MAX_TIME)
|
||||
result = task.run()
|
||||
cache_key = const.SYSTEM_USER_CONN_CACHE_KEY.format(system_user.name)
|
||||
print("Set cache: {} {}".format(cache_key, result.results_summary))
|
||||
cache.set(cache_key, result.results_summary, CACHE_MAX_TIME)
|
||||
return result.results_summary
|
||||
update_system_user_connectablity_info(result, system_user=system_user.name)
|
||||
return result
|
||||
|
||||
|
||||
@shared_task
|
||||
def test_system_user_connectability_manual(system_user):
|
||||
task_name = const.TEST_SYSTEM_USER_CONN_MANUAL_TASK_NAME.format(system_user.name)
|
||||
return test_system_user_connectability_util(system_user, task_name)
|
||||
|
||||
|
||||
@shared_task
|
||||
@register_as_period_task(interval=3600)
|
||||
@after_app_ready_start
|
||||
@after_app_shutdown_clean
|
||||
def test_system_user_connectability_period():
|
||||
lock_key = const.TEST_SYSTEM_USER_CONN_LOCK_KEY
|
||||
if cache.get(lock_key) == 1:
|
||||
logger.debug("{} task is running, passed this time".format(
|
||||
const.TEST_SYSTEM_USER_CONN_PERIOD_TASK_NAME
|
||||
))
|
||||
return
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
system_users = SystemUser.objects.all()
|
||||
for system_user in system_users:
|
||||
task_name = const.TEST_SYSTEM_USER_CONN_PERIOD_TASK_NAME.format(
|
||||
system_user.name
|
||||
)
|
||||
assets = system_user.get_clusters_assets()
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = const.TEST_SYSTEM_USER_CONN_TASKS
|
||||
_ = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=False, run_as=system_user.name,
|
||||
created_by='System', interval=3600, is_periodic=True,
|
||||
callback=update_admin_user_connectability_info.name,
|
||||
)
|
||||
|
||||
logger.debug("Task {} start".format(const.TEST_SYSTEM_USER_CONN_PERIOD_TASK_NAME))
|
||||
cache.set(lock_key, 1, CACHE_MAX_TIME)
|
||||
for system_user in SystemUser.objects.all():
|
||||
test_system_user_connectability(system_user)
|
||||
|
||||
#### Push system user tasks ####
|
||||
|
||||
def get_push_system_user_tasks(system_user):
|
||||
tasks = [
|
||||
|
@ -270,75 +351,48 @@ def get_push_system_user_tasks(system_user):
|
|||
|
||||
|
||||
@shared_task
|
||||
def push_system_user(system_user, assets, task_name=None):
|
||||
from ops.utils import create_or_update_task
|
||||
def push_system_user_util(system_user, task_name):
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
|
||||
if system_user.auto_push and assets:
|
||||
if task_name is None:
|
||||
task_name = 'PUSH-SYSTEM-USER-{}'.format(system_user.name)
|
||||
tasks = get_push_system_user_tasks(system_user)
|
||||
assets = system_user.get_clusters_assets()
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
task, _ = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System'
|
||||
)
|
||||
return task.run()
|
||||
|
||||
|
||||
@shared_task
|
||||
def push_system_user_to_cluster_assets_manual(system_user):
|
||||
task_name = const.PUSH_SYSTEM_USER_MANUAL_TASK_NAME.format(system_user.name)
|
||||
return push_system_user_util(system_user, task_name)
|
||||
|
||||
|
||||
@shared_task
|
||||
@register_as_period_task(interval=3600)
|
||||
@after_app_ready_start
|
||||
@after_app_shutdown_clean
|
||||
def push_system_user_period():
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
|
||||
for system_user in SystemUser.objects.filter(auto_push=True):
|
||||
assets = system_user.get_clusters_assets()
|
||||
task_name = const.PUSH_SYSTEM_USER_PERIOD_TASK_NAME.format(system_user.name)
|
||||
hosts = [asset.hostname for asset in assets]
|
||||
tasks = get_push_system_user_tasks(system_user)
|
||||
|
||||
task = create_or_update_task(
|
||||
_ = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System'
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System',
|
||||
interval=60*60*24, is_periodic=True,
|
||||
)
|
||||
result = task.run()
|
||||
for i in result.results_summary.get('contacted'):
|
||||
logger.debug("Push system user {} to {} [OK]".format(
|
||||
system_user.name, i
|
||||
))
|
||||
for i in result.results_summary.get('dark'):
|
||||
logger.error("Push system user {} to {} [FAILED]".format(
|
||||
system_user.name, i
|
||||
))
|
||||
return result.results_summary
|
||||
else:
|
||||
msg = "Task {} does'nt execute, because auto_push " \
|
||||
"is not True, or not assets".format(task_name)
|
||||
logger.debug(msg)
|
||||
return {}
|
||||
|
||||
|
||||
@shared_task
|
||||
def push_system_user_to_cluster_assets(system_user, force=False):
|
||||
lock_key = const.PUSH_SYSTEM_USER_LOCK_KEY
|
||||
task_name = const.PUSH_SYSTEM_USER_TASK_NAME.format(system_user.name)
|
||||
if cache.get(lock_key, 0) == 1 and not force:
|
||||
msg = "Task {} is running or before long, passed this time".format(
|
||||
task_name
|
||||
)
|
||||
logger.debug(msg)
|
||||
return {}
|
||||
|
||||
logger.debug("Task {} start".format(task_name))
|
||||
assets = system_user.get_clusters_assets()
|
||||
summary = push_system_user(system_user, assets, task_name)
|
||||
return summary
|
||||
|
||||
|
||||
@shared_task
|
||||
def push_system_user_period():
|
||||
task_name = const.PUSH_SYSTEM_USER_PERIOD_TASK_NAME
|
||||
if cache.get(const.PUSH_SYSTEM_USER_PERIOD_LOCK_KEY) == 1:
|
||||
msg = "Task {} is running or before long, passed this time".format(
|
||||
task_name
|
||||
)
|
||||
logger.debug(msg)
|
||||
return
|
||||
logger.debug("Task {} start".format(task_name))
|
||||
cache.set(const.PUSH_SYSTEM_USER_PERIOD_LOCK_KEY, 1, timeout=CACHE_MAX_TIME)
|
||||
|
||||
for system_user in SystemUser.objects.filter(auto_push=True):
|
||||
push_system_user_to_cluster_assets(system_user)
|
||||
|
||||
|
||||
@shared_task
|
||||
def push_asset_system_users(asset, system_users=None, task_name=None):
|
||||
from ops.utils import create_or_update_task
|
||||
if task_name is None:
|
||||
task_name = "PUSH-ASSET-SYSTEM-USER-{}".format(asset.hostname)
|
||||
def push_asset_system_users_util(asset, task_name, system_users=None):
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
|
||||
if system_users is None:
|
||||
system_users = asset.cluster.systemuser_set.all()
|
||||
|
@ -349,75 +403,38 @@ def push_asset_system_users(asset, system_users=None, task_name=None):
|
|||
tasks.extend(get_push_system_user_tasks(system_user))
|
||||
|
||||
hosts = [asset.hostname]
|
||||
|
||||
task = create_or_update_task(
|
||||
task, _ = update_or_create_ansible_task(
|
||||
task_name=task_name, hosts=hosts, tasks=tasks, pattern='all',
|
||||
options=const.TASK_OPTIONS, run_as_admin=True, created_by='System'
|
||||
)
|
||||
result = task.run()
|
||||
return result.results_summary
|
||||
|
||||
|
||||
@receiver(post_save, sender=Asset, dispatch_uid="my_unique_identifier")
|
||||
def update_asset_info_when_created(sender, instance=None, created=False, **kwargs):
|
||||
if instance and created:
|
||||
msg = "Receive asset {} create signal, update asset hardware info".format(
|
||||
instance
|
||||
)
|
||||
logger.debug(msg)
|
||||
task_name = "UPDATE-ASSET-HARDWARE-INFO-WHEN-CREATED"
|
||||
update_assets_hardware_info.delay([instance], task_name)
|
||||
|
||||
|
||||
@receiver(post_save, sender=Asset, dispatch_uid="my_unique_identifier")
|
||||
def update_asset_conn_info_on_created(sender, instance=None, created=False, **kwargs):
|
||||
if instance and created:
|
||||
task_name = 'TEST-ASSET-CONN-WHEN-CREATED-{}'.format(instance)
|
||||
msg = "Receive asset {} create signal, test asset connectability".format(
|
||||
instance
|
||||
)
|
||||
logger.debug(msg)
|
||||
test_admin_user_connectability_manual.delay(instance, task_name)
|
||||
return task.run()
|
||||
|
||||
|
||||
@receiver(post_save, sender=Asset, dispatch_uid="my_unique_identifier")
|
||||
def push_system_user_on_created(sender, instance=None, created=False, **kwargs):
|
||||
if instance and created:
|
||||
task_name = 'PUSH-SYSTEM-USER-WHEN-ASSET-CREATED-{}'.format(instance)
|
||||
task_name = const.PUSH_SYSTEM_USERS_ON_ASSET_CREATE_TASK_NAME
|
||||
system_users = instance.cluster.systemuser_set.all()
|
||||
msg = "Receive asset {} create signal, push system users".format(
|
||||
instance
|
||||
)
|
||||
logger.debug(msg)
|
||||
push_asset_system_users.delay(instance, system_users, task_name=task_name)
|
||||
push_asset_system_users_util.delay(instance, system_users, task_name=task_name)
|
||||
|
||||
|
||||
@receiver(post_save, sender=SystemUser)
|
||||
def push_system_user_on_auth_change(sender, instance=None, update_fields=None, **kwargs):
|
||||
fields_check = {'_password', '_private_key', '_public_key'}
|
||||
auth_changed = update_fields & fields_check if update_fields else None
|
||||
if instance and instance.auto_push and auth_changed:
|
||||
logger.debug("System user `{}` auth changed, push it".format(instance.name))
|
||||
task_name = "PUSH-SYSTEM-USER-ON-CREATED-{}".format(instance.name)
|
||||
push_system_user_to_cluster_assets.delay(instance, task_name)
|
||||
def push_system_user_on_change(sender, instance=None, update_fields=None, **kwargs):
|
||||
if instance and instance.auto_push:
|
||||
logger.debug("System user `{}` changed, push it".format(instance.name))
|
||||
task_name = "PUSH SYSTEM USER ON CREATED: {}".format(instance.name)
|
||||
push_system_user_util.delay(instance, task_name)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
celery_app.conf['CELERYBEAT_SCHEDULE'].update(
|
||||
{
|
||||
'update_assets_hardware_period': {
|
||||
'task': 'assets.tasks.update_assets_hardware_period',
|
||||
'schedule': 60*60*24,
|
||||
'args': (),
|
||||
},
|
||||
'test-admin-user-connectability_period': {
|
||||
'task': 'assets.tasks.test_admin_user_connectability_period',
|
||||
'schedule': 60*60,
|
||||
'args': (),
|
||||
},
|
||||
'push_system_user_period': {
|
||||
'task': 'assets.tasks.push_system_user_period',
|
||||
'schedule': 60*60,
|
||||
'args': (),
|
||||
}
|
||||
}
|
||||
)
|
||||
|
|
|
@ -90,7 +90,7 @@
|
|||
}
|
||||
}
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
authFieldsDisplay();
|
||||
$(auto_generate_key).change(function () {
|
||||
authFieldsDisplay();
|
||||
|
|
|
@ -56,7 +56,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
})
|
||||
</script>
|
||||
{% endblock %}
|
|
@ -157,7 +157,7 @@ function bindToCluster(clusters) {
|
|||
|
||||
jumpserver.cluster_selected = {};
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2().on('select2:select', function(evt) {
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });.on('select2:select', function(evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.cluster_selected[data.id] = data.text;
|
||||
}).on('select2:unselect', function(evt) {
|
||||
|
|
|
@ -30,7 +30,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
}).on('click', '.field-tag', function() {
|
||||
changeField(this);
|
||||
}).on('click', '#change_all', function () {
|
||||
|
|
|
@ -47,7 +47,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
{# $("#id_tags").select2({#}
|
||||
{# tags: true,#}
|
||||
{# maximumSelectionLength: 8 //最多能够选择的个数#}
|
||||
|
|
|
@ -81,7 +81,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script type="text/javascript">
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
$('.select2-system-user').select2();
|
||||
});
|
||||
|
||||
|
|
|
@ -184,7 +184,7 @@ function initTable() {
|
|||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
|
||||
$('.select2.asset-select').select2()
|
||||
.on('select2:select', function(evt) {
|
||||
|
|
|
@ -49,9 +49,6 @@ $(document).ready(function(){
|
|||
"aaSorting": [[2, "asc"]],
|
||||
"aoColumnDefs": [ { "bSortable": false, "aTargets": [ 0 ] }],
|
||||
"bAutoWidth": false,
|
||||
"language": {
|
||||
"url": "/static/js/plugins/dataTables/i18n/zh-hans.json"
|
||||
},
|
||||
columns: [
|
||||
{data: "checkbox"},
|
||||
{data: "id"},
|
||||
|
|
|
@ -181,7 +181,7 @@ function initTable() {
|
|||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2()
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
.on("select2:select", function (evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.assets_selected[data.id] = data.text;
|
||||
|
|
|
@ -69,7 +69,10 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({
|
||||
dropdownAutoWidth : true,
|
||||
width: 'auto'
|
||||
});
|
||||
})
|
||||
</script>
|
||||
{% endblock %}
|
|
@ -151,7 +151,7 @@
|
|||
<script>
|
||||
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
})
|
||||
.on('click', '.btn-delete-cluster', function () {
|
||||
var name = "{{ cluster.name }}";
|
||||
|
|
|
@ -125,7 +125,7 @@ function initAssetsTable() {
|
|||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2()
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
.on("select2:select", function (evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.assets_selected[data.id] = data.text;
|
||||
|
|
|
@ -212,7 +212,7 @@ function updateSystemUserCluster(clusters) {
|
|||
}
|
||||
jumpserver.cluster_selected = {};
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2()
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
.on('select2:select', function(evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.cluster_selected[data.id] = data.text;
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
})
|
||||
</script>
|
||||
{% endblock %}
|
|
@ -34,6 +34,7 @@
|
|||
<th class="text-center">{% trans 'Hardware' %}</th>
|
||||
<th class="text-center">{% trans 'Active' %}</th>
|
||||
<th class="text-center">{% trans 'Connective' %}</th>
|
||||
<th class="text-center">{% trans 'Action' %}</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
@ -70,12 +71,18 @@ function initTable() {
|
|||
} else {
|
||||
$(td).html('<i class="fa fa-circle text-navy"></i>')
|
||||
}
|
||||
}},
|
||||
{targets: 9, createdCell: function (td, cellData, rowData) {
|
||||
var conn_btn = '<a href="{% url "terminal:web-terminal" %}?id={{ DEFAULT_PK }}" class="btn btn-xs btn-info">{% trans "Connect" %}</a>'.replace("{{ DEFAULT_PK }}", cellData);
|
||||
$(td).html(conn_btn)
|
||||
}}
|
||||
],
|
||||
ajax_url: '{% url "api-assets:asset-list" %}',
|
||||
columns: [{data: "id"}, {data: "hostname" }, {data: "ip" }, {data: "port" },
|
||||
columns: [
|
||||
{data: "id"}, {data: "hostname" }, {data: "ip" }, {data: "port" },
|
||||
{data: "get_type_display" }, {data: "get_env_display"}, {data: "hardware_info"},
|
||||
{data: "is_active" }, {data: "is_connective"}],
|
||||
{data: "is_active" }, {data: "is_connective"}, {data: "id"}
|
||||
],
|
||||
op_html: $('#actions').html()
|
||||
};
|
||||
return jumpserver.initDataTable(options);
|
||||
|
|
|
@ -28,7 +28,7 @@ from common.utils import get_object_or_none, get_logger, is_uuid
|
|||
from .. import forms
|
||||
from ..models import Asset, AssetGroup, AdminUser, Cluster, SystemUser
|
||||
from ..hands import AdminUserRequiredMixin
|
||||
from ..tasks import update_assets_hardware_info
|
||||
from ..tasks import update_assets_hardware_info_util
|
||||
|
||||
|
||||
__all__ = [
|
||||
|
@ -314,10 +314,6 @@ class BulkImportAssetView(AdminUserRequiredMixin, JSONResponseMixin, FormView):
|
|||
except Exception as e:
|
||||
failed.append('%s: %s' % (asset_dict['hostname'], str(e)))
|
||||
|
||||
if assets:
|
||||
update_assets_hardware_info.delay([asset._to_secret_json() for asset in assets])
|
||||
|
||||
|
||||
data = {
|
||||
'created': created,
|
||||
'created_info': 'Created {}'.format(len(created)),
|
||||
|
|
|
@ -1,8 +1,16 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
|
||||
import os
|
||||
import json
|
||||
from functools import wraps
|
||||
|
||||
from celery import Celery
|
||||
from celery import Celery, subtask
|
||||
from celery.signals import worker_ready, worker_shutdown
|
||||
from django.db.utils import ProgrammingError, OperationalError
|
||||
|
||||
from .utils import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
# set the default Django settings module for the 'celery' program.
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jumpserver.settings')
|
||||
|
@ -13,10 +21,174 @@ app = Celery('jumpserver')
|
|||
|
||||
# Using a string here means the worker will not have to
|
||||
# pickle the object when using Windows.
|
||||
app.config_from_object('django.conf:settings')
|
||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
||||
app.autodiscover_tasks(lambda: [app_config.split('.')[0] for app_config in settings.INSTALLED_APPS])
|
||||
|
||||
app.conf.update(
|
||||
CELERYBEAT_SCHEDULE={
|
||||
|
||||
def create_or_update_celery_periodic_tasks(tasks):
|
||||
from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule
|
||||
"""
|
||||
:param tasks: {
|
||||
'add-every-monday-morning': {
|
||||
'task': 'tasks.add' # A registered celery task,
|
||||
'interval': 30,
|
||||
'crontab': "30 7 * * *",
|
||||
'args': (16, 16),
|
||||
'kwargs': {},
|
||||
'enabled': False,
|
||||
},
|
||||
}
|
||||
)
|
||||
:return:
|
||||
"""
|
||||
# Todo: check task valid, task and callback must be a celery task
|
||||
for name, detail in tasks.items():
|
||||
interval = None
|
||||
crontab = None
|
||||
try:
|
||||
IntervalSchedule.objects.all().count()
|
||||
except (ProgrammingError, OperationalError):
|
||||
return None
|
||||
|
||||
if isinstance(detail.get("interval"), int):
|
||||
intervals = IntervalSchedule.objects.filter(
|
||||
every=detail["interval"], period=IntervalSchedule.SECONDS
|
||||
)
|
||||
if intervals:
|
||||
interval = intervals[0]
|
||||
else:
|
||||
interval = IntervalSchedule.objects.create(
|
||||
every=detail['interval'],
|
||||
period=IntervalSchedule.SECONDS,
|
||||
)
|
||||
elif isinstance(detail.get("crontab"), str):
|
||||
try:
|
||||
minute, hour, day, month, week = detail["crontab"].split()
|
||||
except ValueError:
|
||||
raise SyntaxError("crontab is not valid")
|
||||
kwargs = dict(
|
||||
minute=minute, hour=hour, day_of_week=week,
|
||||
day_of_month=day, month_of_year=month,
|
||||
)
|
||||
contabs = CrontabSchedule.objects.filter(
|
||||
**kwargs
|
||||
)
|
||||
if contabs:
|
||||
crontab = contabs[0]
|
||||
else:
|
||||
crontab = CrontabSchedule.objects.create(**kwargs)
|
||||
else:
|
||||
raise SyntaxError("Schedule is not valid")
|
||||
|
||||
defaults = dict(
|
||||
interval=interval,
|
||||
crontab=crontab,
|
||||
name=name,
|
||||
task=detail['task'],
|
||||
args=json.dumps(detail.get('args', [])),
|
||||
kwargs=json.dumps(detail.get('kwargs', {})),
|
||||
enabled=detail.get('enabled', True),
|
||||
)
|
||||
|
||||
task = PeriodicTask.objects.update_or_create(
|
||||
defaults=defaults, name=name,
|
||||
)
|
||||
return task
|
||||
|
||||
|
||||
def disable_celery_periodic_task(task_name):
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
PeriodicTask.objects.filter(name=task_name).update(enabled=False)
|
||||
|
||||
|
||||
def delete_celery_periodic_task(task_name):
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
PeriodicTask.objects.filter(name=task_name).delete()
|
||||
|
||||
|
||||
__REGISTER_PERIODIC_TASKS = []
|
||||
__AFTER_APP_SHUTDOWN_CLEAN_TASKS = []
|
||||
__AFTER_APP_READY_RUN_TASKS = []
|
||||
|
||||
|
||||
def register_as_period_task(crontab=None, interval=None):
|
||||
"""
|
||||
Warning: Task must be have not any args and kwargs
|
||||
:param crontab: "* * * * *"
|
||||
:param interval: 60*60*60
|
||||
:return:
|
||||
"""
|
||||
if crontab is None and interval is None:
|
||||
raise SyntaxError("Must set crontab or interval one")
|
||||
|
||||
def decorate(func):
|
||||
if crontab is None and interval is None:
|
||||
raise SyntaxError("Interval and crontab must set one")
|
||||
|
||||
# Because when this decorator run, the task was not created,
|
||||
# So we can't use func.name
|
||||
name = '{func.__module__}.{func.__name__}'.format(func=func)
|
||||
if name not in __REGISTER_PERIODIC_TASKS:
|
||||
create_or_update_celery_periodic_tasks({
|
||||
name: {
|
||||
'task': name,
|
||||
'interval': interval,
|
||||
'crontab': crontab,
|
||||
'args': (),
|
||||
'enabled': True,
|
||||
}
|
||||
})
|
||||
__REGISTER_PERIODIC_TASKS.append(name)
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
return func(*args, **kwargs)
|
||||
return wrapper
|
||||
return decorate
|
||||
|
||||
|
||||
def after_app_ready_start(func):
|
||||
# Because when this decorator run, the task was not created,
|
||||
# So we can't use func.name
|
||||
name = '{func.__module__}.{func.__name__}'.format(func=func)
|
||||
if name not in __AFTER_APP_READY_RUN_TASKS:
|
||||
__AFTER_APP_READY_RUN_TASKS.append(name)
|
||||
|
||||
@wraps(func)
|
||||
def decorate(*args, **kwargs):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
def after_app_shutdown_clean(func):
|
||||
# Because when this decorator run, the task was not created,
|
||||
# So we can't use func.name
|
||||
name = '{func.__module__}.{func.__name__}'.format(func=func)
|
||||
if name not in __AFTER_APP_READY_RUN_TASKS:
|
||||
__AFTER_APP_SHUTDOWN_CLEAN_TASKS.append(name)
|
||||
|
||||
@wraps(func)
|
||||
def decorate(*args, **kwargs):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
@worker_ready.connect
|
||||
def on_app_ready(sender=None, headers=None, body=None, **kwargs):
|
||||
logger.debug("App ready signal recv")
|
||||
logger.debug("Start need start task: [{}]".format(
|
||||
", ".join(__AFTER_APP_READY_RUN_TASKS))
|
||||
)
|
||||
for task in __AFTER_APP_READY_RUN_TASKS:
|
||||
subtask(task).delay()
|
||||
|
||||
|
||||
@worker_shutdown.connect
|
||||
def after_app_shutdown(sender=None, headers=None, body=None, **kwargs):
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
logger.debug("App shutdown signal recv")
|
||||
logger.debug("Clean need cleaned period tasks: [{}]".format(
|
||||
', '.join(__AFTER_APP_SHUTDOWN_CLEAN_TASKS))
|
||||
)
|
||||
PeriodicTask.objects.filter(name__in=__AFTER_APP_SHUTDOWN_CLEAN_TASKS).delete()
|
||||
|
|
|
@ -3,15 +3,14 @@
|
|||
import inspect
|
||||
from django.db import models
|
||||
from django.http import JsonResponse
|
||||
from django.utils.timezone import now
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
|
||||
|
||||
class NoDeleteQuerySet(models.query.QuerySet):
|
||||
|
||||
def delete(self):
|
||||
return self.update(is_discard=True, discard_time=now())
|
||||
return self.update(is_discard=True, discard_time=timezone.now())
|
||||
|
||||
|
||||
class NoDeleteManager(models.Manager):
|
||||
|
@ -37,7 +36,7 @@ class NoDeleteModelMixin(models.Model):
|
|||
|
||||
def delete(self):
|
||||
self.is_discard = True
|
||||
self.discard_time = now()
|
||||
self.discard_time = timezone.now()
|
||||
return self.save()
|
||||
|
||||
|
||||
|
@ -88,3 +87,29 @@ class BulkSerializerMixin(object):
|
|||
|
||||
return ret
|
||||
|
||||
|
||||
class DatetimeSearchMixin:
|
||||
date_from = date_to = None
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
date_from_s = self.request.GET.get('date_from')
|
||||
date_to_s = self.request.GET.get('date_to')
|
||||
|
||||
if date_from_s:
|
||||
date_from = timezone.datetime.strptime(date_from_s, '%m/%d/%Y')
|
||||
self.date_from = date_from.replace(
|
||||
tzinfo=timezone.get_current_timezone()
|
||||
)
|
||||
else:
|
||||
self.date_from = timezone.now() - timezone.timedelta(7)
|
||||
|
||||
if date_to_s:
|
||||
date_to = timezone.datetime.strptime(
|
||||
date_to_s + ' 23:59:59', '%m/%d/%Y %H:%M:%S'
|
||||
)
|
||||
self.date_to = date_to.replace(
|
||||
tzinfo=timezone.get_current_timezone()
|
||||
)
|
||||
else:
|
||||
self.date_to = timezone.now()
|
||||
return super().get(request, *args, **kwargs)
|
|
@ -1,6 +1,6 @@
|
|||
from django.core.mail import send_mail
|
||||
from django.conf import settings
|
||||
from common import celery_app as app
|
||||
from .celery import app
|
||||
from .utils import get_logger
|
||||
|
||||
|
||||
|
|
|
@ -6,7 +6,6 @@ from six import string_types
import base64
import os
from itertools import chain
import string
import logging
import datetime
import time

@ -26,9 +25,6 @@ from django.conf import settings
from django.utils import timezone

from .compat import to_bytes, to_string

SECRET_KEY = settings.SECRET_KEY
UUID_PATTERN = re.compile(r'[0-9a-zA-Z\-]{36}')


@ -50,9 +46,22 @@ def get_object_or_none(model, **kwargs):
    return obj


class Signer(object):
class Singleton(type):
    def __init__(cls, *args, **kwargs):
        cls.__instance = None
        super().__init__(*args, **kwargs)

    def __call__(cls, *args, **kwargs):
        if cls.__instance is None:
            cls.__instance = super().__call__(*args, **kwargs)
            return cls.__instance
        else:
            return cls.__instance


class Signer(metaclass=Singleton):
    """Encrypt, decrypt, and verify timestamp-based tokens."""
    def __init__(self, secret_key=SECRET_KEY):
    def __init__(self, secret_key=None):
        self.secret_key = secret_key

    def sign(self, value):
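A quick behaviour check of the metaclass above: constructor arguments of later calls are swallowed and every caller gets the first instance, which is why get_signer() further down can hand out one shared Signer. The demo class is hypothetical.

class _Once(metaclass=Singleton):
    def __init__(self, value=None):
        self.value = value

a = _Once(value=1)
b = _Once(value=2)      # arguments of the second call are ignored
assert a is b and a.value == 1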
@ -99,58 +108,10 @@ def combine_seq(s1, s2, callback=None):
|
|||
return seq
|
||||
|
||||
|
||||
def search_object_attr(obj, value='', attr_list=None, ignore_case=False):
|
||||
"""It's provide a method to search a object attribute equal some value
|
||||
|
||||
If object some attribute equal :param: value, return True else return False
|
||||
|
||||
class A():
|
||||
name = 'admin'
|
||||
age = 7
|
||||
|
||||
:param obj: A object
|
||||
:param value: A string match object attribute
|
||||
:param attr_list: Only match attribute in attr_list
|
||||
:param ignore_case: Ignore case
|
||||
:return: Boolean
|
||||
"""
|
||||
if value == '':
|
||||
return True
|
||||
|
||||
try:
|
||||
object_attr = obj.__dict__
|
||||
except AttributeError:
|
||||
return False
|
||||
|
||||
if attr_list is not None:
|
||||
new_object_attr = {}
|
||||
for attr in attr_list:
|
||||
new_object_attr[attr] = object_attr.pop(attr)
|
||||
object_attr = new_object_attr
|
||||
|
||||
if ignore_case:
|
||||
if not isinstance(value, string_types):
|
||||
return False
|
||||
|
||||
if value.lower() in map(string.lower, map(str, object_attr.values())):
|
||||
return True
|
||||
else:
|
||||
if value in object_attr.values():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def get_logger(name=None):
|
||||
return logging.getLogger('jumpserver.%s' % name)
|
||||
|
||||
|
||||
def int_seq(seq):
|
||||
try:
|
||||
return map(int, seq)
|
||||
except ValueError:
|
||||
return seq
|
||||
|
||||
|
||||
def timesince(dt, since='', default="just now"):
|
||||
"""
|
||||
Returns string representing "time since" e.g.
|
||||
|
@ -390,4 +351,6 @@ def is_uuid(s):
|
|||
return False
|
||||
|
||||
|
||||
signer = Signer()
|
||||
def get_signer():
|
||||
signer = Signer(settings.SECRET_KEY)
|
||||
return signer
|
||||
|
|
|
@ -27,9 +27,7 @@ sys.path.append(PROJECT_DIR)

# Import project config setting
try:
    from config import config as env_config, env

    CONFIG = env_config.get(env, 'default')()
    from config import config as CONFIG
except ImportError:
    CONFIG = type('_', (), {'__getattr__': lambda arg1, arg2: None})()


@ -66,12 +64,12 @@ INSTALLED_APPS = [
    'django_filters',
    'bootstrap3',
    'captcha',
    'django_celery_beat',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',

]

MIDDLEWARE = [

@ -132,7 +130,6 @@ MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
# }
# }

print(CONFIG.DB_ENGINE)
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.{}'.format(CONFIG.DB_ENGINE),

@ -245,6 +242,7 @@ LOGGING = {
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'

# TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'

USE_I18N = True

@ -260,6 +258,9 @@ LOCALE_PATHS = [os.path.join(BASE_DIR, 'locale'), ]
# https://docs.djangoproject.com/en/1.10/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(PROJECT_DIR, "data", "static")
STATIC_DIR = os.path.join(BASE_DIR, "static")


STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "static"),

@ -325,20 +326,21 @@ if CONFIG.AUTH_LDAP:
    AUTH_LDAP_USER_ATTR_MAP = CONFIG.AUTH_LDAP_USER_ATTR_MAP

# Celery using redis as broker
BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/3' % {
CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/3' % {
    'password': CONFIG.REDIS_PASSWORD if CONFIG.REDIS_PASSWORD else '',
    'host': CONFIG.REDIS_HOST or '127.0.0.1',
    'port': CONFIG.REDIS_PORT or 6379,
}
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_RESULT_SERIALIZER = 'pickle'
CELERY_RESULT_BACKEND = BROKER_URL
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json', 'pickle']
CELERY_TASK_RESULT_EXPIRES = 3600
CELERYD_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERYD_TASK_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_TIMEZONE = TIME_ZONE
# TERMINAL_HEATBEAT_INTERVAL = CONFIG.TERMINAL_HEATBEAT_INTERVAL or 30
CELERY_RESULT_EXPIRES = 3600
CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_TASK_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_TASK_EAGER_PROPAGATES = True
# CELERY_TIMEZONE = TIME_ZONE
# CELERY_ENABLE_UTC = True


# Cache use redis
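For reference, a small check of what the broker template above expands to when the fallback values apply (no Redis password, local Redis); the dict literal just mirrors the defaults in the settings block.

params = {'password': '', 'host': '127.0.0.1', 'port': 6379}
print('redis://:%(password)s@%(host)s:%(port)s/3' % params)
# -> redis://:@127.0.0.1:6379/3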
@ -4,6 +4,7 @@ from __future__ import unicode_literals
from django.conf.urls import url, include
from django.conf import settings
from django.conf.urls.static import static
from django.views.static import serve as static_serve

from rest_framework.schemas import get_schema_view
from rest_framework_swagger.renderers import SwaggerUIRenderer, OpenAPIRenderer

@ -33,8 +34,8 @@ urlpatterns = [


if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
    urlpatterns += [
        url(r'^docs/', schema_view, name="docs"),
    ]
    ] + static(settings.STATIC_URL, document_root=settings.STATIC_DIR) \
        + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
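Worth noting about the pattern above: Django's static() helper only emits URL patterns while DEBUG is on, so the extra docs/static/media routes vanish in production, where an external web server is expected to serve STATIC_ROOT and MEDIA_ROOT. A minimal check, assuming the same settings module:

from django.conf import settings
from django.conf.urls.static import static

extra = static(settings.STATIC_URL, document_root=settings.STATIC_DIR)
assert settings.DEBUG or extra == []   # static() returns [] when DEBUG is False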
@ -3,4 +3,4 @@
from .callback import *
from .inventory import *
from .runner import *

from .exceptions import *


@ -28,6 +28,7 @@ class AdHocResultCallback(CallbackModule):
        host = res._host.get_name()
        task_name = res.task_name
        task_result = res._result
        print(task_result)

        if self.results_raw[t].get(host):
            self.results_raw[t][host][task_name] = task_result

@ -50,6 +51,7 @@ class AdHocResultCallback(CallbackModule):
        contacted.remove(host)

    def v2_runner_on_failed(self, result, ignore_errors=False):
        print("#######RUN FAILED" * 19)
        self.gather_result("failed", result)
        super().v2_runner_on_failed(result, ignore_errors=ignore_errors)
@ -1,6 +1,10 @@
# -*- coding: utf-8 -*-
#

__all__ = [
    'AnsibleError'
]


class AnsibleError(Exception):
    pass


@ -27,7 +27,7 @@ Options = namedtuple('Options', [
    'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
    'scp_extra_args', 'become', 'become_method', 'become_user',
    'verbosity', 'check', 'extra_vars', 'playbook_path', 'passwords',
    'diff', 'gathering'
    'diff', 'gathering', 'remote_tmp',
])


@ -57,6 +57,7 @@ def get_default_options():
        passwords=None,
        diff=False,
        gathering='implicit',
        remote_tmp='/tmp/.ansible'
    )
    return options
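Since Options is a plain namedtuple, per-run tweaks can be expressed with _replace on the defaults factory above; the override values shown here are illustrative only.

options = get_default_options()._replace(
    become=True,                 # fields come from the Options namedtuple above
    become_method='sudo',
    remote_tmp='/tmp/.ansible',
)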
@ -0,0 +1,84 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11 on 2017-12-24 15:21
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AdHoc',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('_tasks', models.TextField(verbose_name='Tasks')),
|
||||
('pattern', models.CharField(default='{}', max_length=64, verbose_name='Pattern')),
|
||||
('_options', models.CharField(default='', max_length=1024, verbose_name='Options')),
|
||||
('_hosts', models.TextField(blank=True, verbose_name='Hosts')),
|
||||
('run_as_admin', models.BooleanField(default=False, verbose_name='Run as admin')),
|
||||
('run_as', models.CharField(default='', max_length=128, verbose_name='Run as')),
|
||||
('_become', models.CharField(default='', max_length=1024, verbose_name='Become')),
|
||||
('created_by', models.CharField(default='', max_length=64, null=True, verbose_name='Create by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'ops_adhoc',
|
||||
'get_latest_by': 'date_created',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AdHocRunHistory',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('date_start', models.DateTimeField(auto_now_add=True, verbose_name='Start time')),
|
||||
('date_finished', models.DateTimeField(blank=True, null=True, verbose_name='End time')),
|
||||
('timedelta', models.FloatField(default=0.0, null=True, verbose_name='Time')),
|
||||
('is_finished', models.BooleanField(default=False, verbose_name='Is finished')),
|
||||
('is_success', models.BooleanField(default=False, verbose_name='Is success')),
|
||||
('_result', models.TextField(blank=True, null=True, verbose_name='Adhoc raw result')),
|
||||
('_summary', models.TextField(blank=True, null=True, verbose_name='Adhoc result summary')),
|
||||
('adhoc', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='ops.AdHoc')),
|
||||
],
|
||||
options={
|
||||
'db_table': 'ops_adhoc_history',
|
||||
'get_latest_by': 'date_start',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Task',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
|
||||
('interval', models.IntegerField(blank=True, help_text='Units: seconds', null=True, verbose_name='Interval')),
|
||||
('crontab', models.CharField(blank=True, help_text='5 * * * *', max_length=128, null=True, verbose_name='Crontab')),
|
||||
('is_periodic', models.BooleanField(default=False)),
|
||||
('callback', models.CharField(blank=True, max_length=128, null=True, verbose_name='Callback')),
|
||||
('is_deleted', models.BooleanField(default=False)),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('created_by', models.CharField(blank=True, default='', max_length=128, null=True)),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'ops_task',
|
||||
'get_latest_by': 'date_created',
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhocrunhistory',
|
||||
name='task',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='history', to='ops.Task'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='adhoc',
|
||||
name='task',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='adhoc', to='ops.Task'),
|
||||
),
|
||||
]
|
|
@ -1,18 +1,25 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
|
||||
import logging
|
||||
import json
|
||||
import uuid
|
||||
|
||||
import time
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django_celery_beat.models import CrontabSchedule, IntervalSchedule, PeriodicTask
|
||||
|
||||
from common.utils import signer
|
||||
from common.utils import get_signer, get_logger
|
||||
from common.celery import delete_celery_periodic_task, create_or_update_celery_periodic_tasks, \
|
||||
disable_celery_periodic_task
|
||||
from .ansible import AdHocRunner, AnsibleError
|
||||
from .inventory import JMSInventory
|
||||
|
||||
__all__ = ["Task", "AdHoc", "AdHocRunHistory"]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = get_logger(__file__)
|
||||
signer = get_signer()
|
||||
|
||||
|
||||
class Task(models.Model):
|
||||
|
@ -22,7 +29,12 @@ class Task(models.Model):
|
|||
"""
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
name = models.CharField(max_length=128, unique=True, verbose_name=_('Name'))
|
||||
interval = models.IntegerField(verbose_name=_("Interval"), null=True, blank=True, help_text=_("Units: seconds"))
|
||||
crontab = models.CharField(verbose_name=_("Crontab"), null=True, blank=True, max_length=128, help_text=_("5 * * * *"))
|
||||
is_periodic = models.BooleanField(default=False)
|
||||
callback = models.CharField(max_length=128, blank=True, null=True, verbose_name=_("Callback")) # Callback must be a registered celery task
|
||||
is_deleted = models.BooleanField(default=False)
|
||||
comment = models.TextField(blank=True, verbose_name=_("Comment"))
|
||||
created_by = models.CharField(max_length=128, blank=True, null=True, default='')
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
__latest_adhoc = None
|
||||
|
@ -65,12 +77,54 @@ class Task(models.Model):
|
|||
def get_run_history(self):
|
||||
return self.history.all()
|
||||
|
||||
def run(self):
|
||||
def run(self, record=True):
|
||||
if self.latest_adhoc:
|
||||
return self.latest_adhoc.run()
|
||||
return self.latest_adhoc.run(record=record)
|
||||
else:
|
||||
return {'error': 'No adhoc'}
|
||||
|
||||
def save(self, force_insert=False, force_update=False, using=None,
|
||||
update_fields=None):
|
||||
from .tasks import run_ansible_task
|
||||
super().save(
|
||||
force_insert=force_insert, force_update=force_update,
|
||||
using=using, update_fields=update_fields,
|
||||
)
|
||||
|
||||
if self.is_periodic:
|
||||
interval = None
|
||||
crontab = None
|
||||
|
||||
if self.interval:
|
||||
interval = self.interval
|
||||
elif self.crontab:
|
||||
crontab = self.crontab
|
||||
|
||||
tasks = {
|
||||
self.name: {
|
||||
"task": run_ansible_task.name,
|
||||
"interval": interval,
|
||||
"crontab": crontab,
|
||||
"args": (str(self.id),),
|
||||
"kwargs": {"callback": self.callback},
|
||||
"enabled": True,
|
||||
}
|
||||
}
|
||||
create_or_update_celery_periodic_tasks(tasks)
|
||||
else:
|
||||
disable_celery_periodic_task(self.name)
|
||||
|
||||
def delete(self, using=None, keep_parents=False):
|
||||
super().delete(using=using, keep_parents=keep_parents)
|
||||
delete_celery_periodic_task(self.name)
|
||||
|
||||
@property
|
||||
def schedule(self):
|
||||
try:
|
||||
return PeriodicTask.objects.get(name=self.name)
|
||||
except PeriodicTask.DoesNotExist:
|
||||
return None
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
|
@ -121,6 +175,23 @@ class AdHoc(models.Model):
|
|||
def hosts(self, item):
|
||||
self._hosts = json.dumps(item)
|
||||
|
||||
@property
|
||||
def inventory(self):
|
||||
if self.become:
|
||||
become_info = {
|
||||
'become': {
|
||||
self.become
|
||||
}
|
||||
}
|
||||
else:
|
||||
become_info = None
|
||||
|
||||
inventory = JMSInventory(
|
||||
self.hosts, run_as_admin=self.run_as_admin,
|
||||
run_as=self.run_as, become_info=become_info
|
||||
)
|
||||
return inventory
|
||||
|
||||
@property
|
||||
def become(self):
|
||||
if self._become:
|
||||
|
@ -128,9 +199,42 @@ class AdHoc(models.Model):
|
|||
else:
|
||||
return {}
|
||||
|
||||
def run(self):
|
||||
from .utils import run_adhoc_object
|
||||
return run_adhoc_object(self, **self.options)
|
||||
def run(self, record=True):
|
||||
if record:
|
||||
return self._run_and_record()
|
||||
else:
|
||||
return self._run_only()
|
||||
|
||||
def _run_and_record(self):
|
||||
history = AdHocRunHistory(adhoc=self, task=self.task)
|
||||
time_start = time.time()
|
||||
try:
|
||||
raw, summary = self._run_only()
|
||||
history.is_finished = True
|
||||
if summary.get('dark'):
|
||||
history.is_success = False
|
||||
else:
|
||||
history.is_success = True
|
||||
history.result = raw
|
||||
history.summary = summary
|
||||
return raw, summary
|
||||
except:
|
||||
return {}, {}
|
||||
finally:
|
||||
history.date_finished = timezone.now()
|
||||
history.timedelta = time.time() - time_start
|
||||
history.save()
|
||||
|
||||
def _run_only(self):
|
||||
runner = AdHocRunner(self.inventory)
|
||||
for k, v in self.options.items():
|
||||
runner.set_option(k, v)
|
||||
|
||||
try:
|
||||
result = runner.run(self.tasks, self.pattern, self.task.name)
|
||||
return result.results_raw, result.results_summary
|
||||
except AnsibleError as e:
|
||||
logger.error("Failed run adhoc {}, {}".format(self.task.name, e))
|
||||
|
||||
@become.setter
|
||||
def become(self, item):
|
||||
|
@ -142,7 +246,7 @@ class AdHoc(models.Model):
|
|||
}
|
||||
:return:
|
||||
"""
|
||||
self._become = signer.sign(json.dumps(item))
|
||||
self._become = signer.sign(json.dumps(item)).decode('utf-8')
|
||||
|
||||
@property
|
||||
def options(self):
|
||||
|
@ -167,6 +271,11 @@ class AdHoc(models.Model):
|
|||
except AdHocRunHistory.DoesNotExist:
|
||||
return None
|
||||
|
||||
def save(self, force_insert=False, force_update=False, using=None,
|
||||
update_fields=None):
|
||||
super().save(force_insert=force_insert, force_update=force_update,
|
||||
using=using, update_fields=update_fields)
|
||||
|
||||
def __str__(self):
|
||||
return "{} of {}".format(self.task.name, self.short_id)
|
||||
|
||||
|
|
|
@ -1,7 +1,10 @@
# coding: utf-8
from celery import shared_task
from celery import shared_task, subtask

from .utils import run_adhoc
from common.utils import get_logger, get_object_or_none
from .models import Task

logger = get_logger(__file__)


def rerun_task():

@ -9,5 +12,31 @@ def rerun_task():


@shared_task
def run_add_hoc_and_record_async(adhoc, **options):
    return run_adhoc(adhoc, **options)
def run_ansible_task(task_id, callback=None, **kwargs):
    """
    :param task_id: id of the Task to run
    :param callback: callback task name
    :return:
    """
    task = get_object_or_none(Task, id=task_id)
    if task:
        result = task.run()
        if callback is not None:
            subtask(callback).delay(result, task_name=task.name)
        return result
    else:
        logger.error("No task found")


@shared_task
def hello(name, callback=None):
    print("Hello {}".format(name))
    if callback is not None:
        subtask(callback).delay("Guahongwei")


@shared_task
def hello_callback(result):
    print(result)
    print("Hello callback")
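A hypothetical dispatch of run_ansible_task from other app code: the task is addressed by its Task id, and the optional callback is a registered celery task name that must accept (result, task_name=None), because the result is forwarded through subtask(callback).delay(...).

from ops.tasks import run_ansible_task

# `task` is assumed to be an ops.models.Task instance obtained elsewhere
run_ansible_task.delay(str(task.id), callback="ops.tasks.some_result_callback")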
@ -16,9 +16,9 @@
|
|||
<div class="form-group" id="date">
|
||||
<div class="input-daterange input-group" id="datepicker">
|
||||
<span class="input-group-addon"><i class="fa fa-calendar"></i></span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from|date:"m/d/Y" }}">
|
||||
<span class="input-group-addon">to</span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to|date:"m/d/Y" }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="input-group">
|
||||
|
@ -57,14 +57,20 @@
|
|||
<td class="text-center">{{ object.adhoc.all | length}}</td>
|
||||
<td class="text-center">{{ object.latest_adhoc.hosts | length}}</td>
|
||||
<td class="text-center">
|
||||
{% if object.latest_history.is_success %}
|
||||
<i class="fa fa-check text-navy"></i>
|
||||
{% else %}
|
||||
<i class="fa fa-times text-danger"></i>
|
||||
{% if object.latest_history %}
|
||||
{% if object.latest_history.is_success %}
|
||||
<i class="fa fa-check text-navy"></i>
|
||||
{% else %}
|
||||
<i class="fa fa-times text-danger"></i>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="text-center">{{ object.latest_history.date_start }}</td>
|
||||
<td class="text-center">{{ object.latest_history.timedelta|floatformat }} s</td>
|
||||
<td class="text-center">
|
||||
{% if object.latest_history %}
|
||||
{{ object.latest_history.timedelta|floatformat }} s
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="text-center">
|
||||
<a href="{% url 'ops:task-run' pk=object.id %}" class="btn btn-xs btn-info">{% trans "Run" %}</a>
|
||||
<a data-uid="{{ object.id }}" class="btn btn-xs btn-danger btn-del">{% trans "Delete" %}</a>
|
||||
|
@ -83,7 +89,10 @@ $(document).ready(function() {
|
|||
"bInfo" : false,
|
||||
"order": []
|
||||
});
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({
|
||||
dropdownAutoWidth : true,
|
||||
width: 'auto'
|
||||
});
|
||||
$('#date .input-daterange').datepicker({
|
||||
dateFormat: 'mm/dd/yy',
|
||||
keyboardNavigation: false,
|
||||
|
|
|
@ -1,122 +1,37 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
|
||||
import time
|
||||
from django.utils import timezone
|
||||
from django.db import transaction
|
||||
|
||||
from common.utils import get_logger, get_object_or_none, get_short_uuid_str
|
||||
from .ansible import AdHocRunner, CommandResultCallback
|
||||
from .inventory import JMSInventory
|
||||
from .ansible.exceptions import AnsibleError
|
||||
from .models import AdHocRunHistory, Task, AdHoc
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from .models import Task, AdHoc
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
def record_adhoc(func):
|
||||
def _deco(adhoc, **options):
|
||||
record = AdHocRunHistory(adhoc=adhoc, task=adhoc.task)
|
||||
time_start = time.time()
|
||||
try:
|
||||
result = func(adhoc, **options)
|
||||
record.is_finished = True
|
||||
if result.results_summary.get('dark'):
|
||||
record.is_success = False
|
||||
else:
|
||||
record.is_success = True
|
||||
record.result = result.results_raw
|
||||
record.summary = result.results_summary
|
||||
return result
|
||||
finally:
|
||||
record.date_finished = timezone.now()
|
||||
record.timedelta = time.time() - time_start
|
||||
record.save()
|
||||
return _deco
|
||||
def get_task_by_id(task_id):
|
||||
return get_object_or_none(Task, id=task_id)
|
||||
|
||||
|
||||
def get_adhoc_inventory(adhoc):
|
||||
if adhoc.become:
|
||||
become_info = {
|
||||
'become': {
|
||||
adhoc.become
|
||||
}
|
||||
}
|
||||
else:
|
||||
become_info = None
|
||||
|
||||
inventory = JMSInventory(
|
||||
adhoc.hosts, run_as_admin=adhoc.run_as_admin,
|
||||
run_as=adhoc.run_as, become_info=become_info
|
||||
)
|
||||
return inventory
|
||||
|
||||
|
||||
def get_inventory(hostname_list, run_as_admin=False, run_as=None, become_info=None):
|
||||
return JMSInventory(
|
||||
hostname_list, run_as_admin=run_as_admin,
|
||||
run_as=run_as, become_info=become_info
|
||||
)
|
||||
|
||||
|
||||
def get_adhoc_runner(hostname_list, run_as_admin=False, run_as=None, become_info=None):
|
||||
inventory = get_inventory(
|
||||
hostname_list, run_as_admin=run_as_admin,
|
||||
run_as=run_as, become_info=become_info
|
||||
)
|
||||
runner = AdHocRunner(inventory)
|
||||
return runner
|
||||
|
||||
|
||||
@record_adhoc
|
||||
def run_adhoc_object(adhoc, **options):
|
||||
"""
|
||||
:param adhoc: Instance of AdHoc
|
||||
:param options: ansible support option, like forks ...
|
||||
:return:
|
||||
"""
|
||||
name = adhoc.task.name
|
||||
inventory = get_adhoc_inventory(adhoc)
|
||||
runner = AdHocRunner(inventory)
|
||||
for k, v in options.items():
|
||||
runner.set_option(k, v)
|
||||
|
||||
try:
|
||||
result = runner.run(adhoc.tasks, adhoc.pattern, name)
|
||||
return result
|
||||
except AnsibleError as e:
|
||||
logger.error("Failed run adhoc {}, {}".format(name, e))
|
||||
raise
|
||||
|
||||
|
||||
def run_adhoc(hostname_list, pattern, tasks, name=None,
|
||||
run_as_admin=False, run_as=None, become_info=None):
|
||||
if name is None:
|
||||
name = "Adhoc-task-{}-{}".format(
|
||||
get_short_uuid_str(),
|
||||
timezone.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
)
|
||||
|
||||
inventory = get_inventory(
|
||||
hostname_list, run_as_admin=run_as_admin,
|
||||
run_as=run_as, become_info=become_info
|
||||
)
|
||||
runner = AdHocRunner(inventory)
|
||||
return runner.run(tasks, pattern, play_name=name)
|
||||
|
||||
|
||||
def create_or_update_task(
|
||||
task_name, hosts, tasks, pattern='all', options=None,
|
||||
def update_or_create_ansible_task(
|
||||
task_name, hosts, tasks,
|
||||
interval=None, crontab=None, is_periodic=False,
|
||||
callback=None, pattern='all', options=None,
|
||||
run_as_admin=False, run_as="", become_info=None,
|
||||
created_by=None
|
||||
created_by=None,
|
||||
):
|
||||
print(options)
|
||||
print(task_name)
|
||||
task = get_object_or_none(Task, name=task_name)
|
||||
if task is None:
|
||||
task = Task(name=task_name, created_by=created_by)
|
||||
task.save()
|
||||
|
||||
adhoc = task.get_latest_adhoc()
|
||||
defaults = {
|
||||
'name': task_name,
|
||||
'interval': interval,
|
||||
'crontab': crontab,
|
||||
'is_periodic': is_periodic,
|
||||
'callback': callback,
|
||||
'created_by': created_by,
|
||||
}
|
||||
|
||||
created = False
|
||||
task, _ = Task.objects.update_or_create(
|
||||
defaults=defaults, name=task_name,
|
||||
)
|
||||
|
||||
adhoc = task.latest_adhoc
|
||||
new_adhoc = AdHoc(task=task, pattern=pattern,
|
||||
run_as_admin=run_as_admin,
|
||||
run_as=run_as)
|
||||
|
@ -124,11 +39,13 @@ def create_or_update_task(
|
|||
new_adhoc.tasks = tasks
|
||||
new_adhoc.options = options
|
||||
new_adhoc.become = become_info
|
||||
|
||||
if not adhoc or adhoc != new_adhoc:
|
||||
logger.debug("Task create new adhoc: {}".format(task_name))
|
||||
new_adhoc.save()
|
||||
task.latest_adhoc = new_adhoc
|
||||
print("Return task")
|
||||
return task
|
||||
created = True
|
||||
return task, created
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -9,40 +9,27 @@ from django.views.generic import ListView, DetailView, View
|
|||
from django.utils import timezone
|
||||
from django.shortcuts import redirect, reverse
|
||||
|
||||
from common.mixins import DatetimeSearchMixin
|
||||
from .models import Task, AdHoc, AdHocRunHistory
|
||||
from ops.tasks import rerun_task
|
||||
|
||||
|
||||
class TaskListView(ListView):
|
||||
class TaskListView(DatetimeSearchMixin, ListView):
|
||||
paginate_by = settings.CONFIG.DISPLAY_PER_PAGE
|
||||
model = Task
|
||||
ordering = ('-date_created',)
|
||||
context_object_name = 'task_list'
|
||||
template_name = 'ops/task_list.html'
|
||||
date_format = '%m/%d/%Y'
|
||||
keyword = date_from_s = date_to_s = ''
|
||||
keyword = ''
|
||||
|
||||
def get_queryset(self):
|
||||
date_to_default = timezone.now()
|
||||
date_from_default = timezone.now() - timezone.timedelta(7)
|
||||
date_from_default_s = date_from_default.strftime(self.date_format)
|
||||
date_to_default_s = date_to_default.strftime(self.date_format)
|
||||
|
||||
self.queryset = super().get_queryset()
|
||||
self.keyword = self.request.GET.get('keyword', '')
|
||||
self.date_from_s = self.request.GET.get('date_from', date_from_default_s)
|
||||
self.date_to_s = self.request.GET.get('date_to', date_to_default_s)
|
||||
|
||||
if self.date_from_s:
|
||||
date_from = datetime.strptime(self.date_from_s, self.date_format)
|
||||
date_from = date_from.replace(tzinfo=timezone.get_current_timezone())
|
||||
self.queryset = self.queryset.filter(date_created__gt=date_from)
|
||||
|
||||
if self.date_to_s:
|
||||
date_to = timezone.datetime.strptime(
|
||||
self.date_to_s + ' 23:59:59', '%m/%d/%Y %H:%M:%S')
|
||||
date_to = date_to.replace(tzinfo=timezone.get_current_timezone())
|
||||
self.queryset = self.queryset.filter(date_created__lt=date_to)
|
||||
self.queryset = self.queryset.filter(
|
||||
date_created__gt=self.date_from,
|
||||
date_created__lt=self.date_to
|
||||
)
|
||||
|
||||
if self.keyword:
|
||||
self.queryset = self.queryset.filter(
|
||||
|
@ -51,15 +38,16 @@ class TaskListView(ListView):
|
|||
return self.queryset
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
print(self.date_from)
|
||||
context = {
|
||||
'app': 'Ops',
|
||||
'action': _('Task list'),
|
||||
'date_from': self.date_from_s,
|
||||
'date_to': self.date_to_s,
|
||||
'date_from': self.date_from,
|
||||
'date_to': self.date_to,
|
||||
'keyword': self.keyword,
|
||||
}
|
||||
kwargs.update(context)
|
||||
return super(TaskListView, self).get_context_data(**kwargs)
|
||||
return super().get_context_data(**kwargs)
|
||||
|
||||
|
||||
class TaskDetailView(DetailView):
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11 on 2017-12-24 15:21
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import common.utils
|
||||
from django.db import migrations, models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('assets', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='AssetPermission',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Active')),
|
||||
('date_expired', models.DateTimeField(default=common.utils.date_expired_default, verbose_name='Date expired')),
|
||||
('created_by', models.CharField(blank=True, max_length=128, verbose_name='Created by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('asset_groups', models.ManyToManyField(blank=True, related_name='granted_by_permissions', to='assets.AssetGroup', verbose_name='Asset group')),
|
||||
('assets', models.ManyToManyField(blank=True, related_name='granted_by_permissions', to='assets.Asset', verbose_name='Asset')),
|
||||
('system_users', models.ManyToManyField(related_name='granted_by_permissions', to='assets.SystemUser', verbose_name='System user')),
|
||||
],
|
||||
),
|
||||
]
|
|
@ -79,7 +79,7 @@
|
|||
<script src="{% static 'js/plugins/datepicker/bootstrap-datepicker.js' %}"></script>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
|
||||
$('.input-group.date').datepicker({
|
||||
format: "yyyy-mm-dd",
|
||||
|
|
|
@ -190,7 +190,7 @@ function updateSystemUser(system_users) {
|
|||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2()
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
.on('select2:select', function(evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.system_users_selected[data.id] = data.text;
|
||||
|
|
|
@ -262,9 +262,6 @@ jumpserver.initDataTable = function (options) {
|
|||
var table = ele.DataTable({
|
||||
pageLength: options.pageLength || 15,
|
||||
dom: options.dom || '<"#uc.pull-left">flt<"row m-t"<"col-md-8"<"#op.col-md-6"><"col-md-6 text-center"i>><"col-md-4"p>>',
|
||||
language: {
|
||||
url: options.i18n_url || "/static/js/plugins/dataTables/i18n/zh-hans.json"
|
||||
},
|
||||
order: options.order || [],
|
||||
select: options.select || 'multi',
|
||||
buttons: [],
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
2
|
|
@ -1,6 +1,6 @@
|
|||
<div class="footer fixed">
|
||||
<div class="pull-right">
|
||||
Version <strong>0.4.0</strong> GPL.
|
||||
Version <strong>0.5.0-{% include '_build.html' %}</strong> GPLv2.
|
||||
<img style="display: none" src="http://www.jumpserver.org/img/evaluate_avatar1.jpg">
|
||||
</div>
|
||||
<div>
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
<div class="sidebar-collapse">
|
||||
<ul class="nav" id="side-menu">
|
||||
{% include '_user_profile.html' %}
|
||||
{% if request.user.is_superuser and request.COOKIES.admin == "Yes" %}
|
||||
{% if request.user.is_superuser and request.COOKIES.IN_ADMIN_PAGE != "No" %}
|
||||
{% include '_nav.html' %}
|
||||
{% else %}
|
||||
{% include '_nav_user.html' %}
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
<li><a href="{% url 'users:user-profile-update' %}">{% trans 'Profile settings' %}</a></li>
|
||||
<li class="divider"></li>
|
||||
{% if request.user.is_superuser %}
|
||||
{% if request.COOKIES.admin == 'No' %}
|
||||
{% if request.COOKIES.IN_ADMIN_PAGE == 'No' %}
|
||||
<li><a id="switch_admin">{% trans 'Admin page' %}</a></li>
|
||||
{% else %}
|
||||
<li><a id="switch_user">{% trans 'User page' %}</a></li>
|
||||
|
@ -37,11 +37,11 @@
|
|||
$(document).ready(function () {
|
||||
})
|
||||
.on('click', '#switch_admin', function () {
|
||||
setCookie("admin", "Yes");
|
||||
setCookie("IN_ADMIN_PAGE", "Yes");
|
||||
window.location = "/"
|
||||
})
|
||||
.on('click', '#switch_user', function () {
|
||||
setCookie("admin", "No");
|
||||
setCookie("IN_ADMIN_PAGE", "No");
|
||||
window.location = "/"
|
||||
})
|
||||
</script>
|
||||
|
|
|
@ -3,11 +3,13 @@
|
|||
from collections import OrderedDict
|
||||
import copy
|
||||
import logging
|
||||
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from rest_framework import viewsets, serializers
|
||||
from rest_framework.views import APIView, Response
|
||||
from rest_framework.permissions import AllowAny
|
||||
from django.core.cache import cache
|
||||
from django.shortcuts import get_object_or_404, redirect
|
||||
from django.utils import timezone
|
||||
from django.core.files.storage import default_storage
|
||||
|
@ -35,7 +37,7 @@ class TerminalViewSet(viewsets.ModelViewSet):
|
|||
x_real_ip = request.META.get('X-Real-IP')
|
||||
remote_addr = x_real_ip or remote_ip
|
||||
|
||||
terminal = get_object_or_none(Terminal, name=name)
|
||||
terminal = get_object_or_none(Terminal, name=name, is_deleted=False)
|
||||
if terminal:
|
||||
msg = 'Terminal name %s already used' % name
|
||||
return Response({'msg': msg}, status=409)
|
||||
|
@ -46,12 +48,11 @@ class TerminalViewSet(viewsets.ModelViewSet):
|
|||
|
||||
if serializer.is_valid():
|
||||
terminal = serializer.save()
|
||||
app_user, access_key = terminal.create_app_user()
|
||||
data = OrderedDict()
|
||||
data['terminal'] = copy.deepcopy(serializer.data)
|
||||
data['user'] = app_user.to_json()
|
||||
data['access_key'] = {'id': access_key.id,
|
||||
'secret': access_key.secret}
|
||||
|
||||
# App should use id, token get access key, if accepted
|
||||
token = uuid.uuid4().hex
|
||||
cache.set(token, str(terminal.id), 3600)
|
||||
data = {"id": str(terminal.id), "token": token, "msg": "Need accept"}
|
||||
return Response(data, status=201)
|
||||
else:
|
||||
data = serializer.errors
|
||||
|
@ -63,6 +64,36 @@ class TerminalViewSet(viewsets.ModelViewSet):
|
|||
return super().get_permissions()
|
||||
|
||||
|
||||
class TerminalTokenApi(APIView):
|
||||
permission_classes = (AllowAny,)
|
||||
queryset = Terminal.objects.filter(is_deleted=False)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
try:
|
||||
terminal = self.queryset.get(id=kwargs.get('terminal'))
|
||||
except Terminal.DoesNotExist:
|
||||
terminal = None
|
||||
|
||||
token = request.query_params.get("token")
|
||||
|
||||
if terminal is None:
|
||||
return Response('May be reject by administrator', status=401)
|
||||
|
||||
if token is None or cache.get(token, "") != str(terminal.id):
|
||||
return Response('Token is not valid', status=401)
|
||||
|
||||
if not terminal.is_accepted:
|
||||
return Response("Terminal was not accepted yet", status=400)
|
||||
|
||||
if not terminal.user or not terminal.user.access_key.all():
|
||||
return Response("No access key generate", status=401)
|
||||
|
||||
access_key = terminal.user.access_key.first()
|
||||
data = OrderedDict()
|
||||
data['access_key'] = {'id': access_key.id, 'secret': access_key.secret}
|
||||
return Response(data, status=200)
|
||||
|
||||
|
||||
class StatusViewSet(viewsets.ModelViewSet):
|
||||
queryset = Status.objects.all()
|
||||
serializer_class = StatusSerializer
|
||||
|
|
|
@ -0,0 +1,102 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11 on 2017-12-24 15:21
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Command',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('user', models.CharField(max_length=64, verbose_name='User')),
|
||||
('asset', models.CharField(max_length=128, verbose_name='Asset')),
|
||||
('system_user', models.CharField(max_length=64, verbose_name='System user')),
|
||||
('input', models.CharField(db_index=True, max_length=128, verbose_name='Input')),
|
||||
('output', models.CharField(blank=True, max_length=1024, verbose_name='Output')),
|
||||
('session', models.CharField(db_index=True, max_length=36, verbose_name='Session')),
|
||||
('timestamp', models.IntegerField(db_index=True)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'terminal_command',
|
||||
'ordering': ('-timestamp',),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Session',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('user', models.CharField(max_length=128, verbose_name='User')),
|
||||
('asset', models.CharField(max_length=1024, verbose_name='Asset')),
|
||||
('system_user', models.CharField(max_length=128, verbose_name='System user')),
|
||||
('login_from', models.CharField(choices=[('ST', 'SSH Terminal'), ('WT', 'Web Terminal')], default='ST', max_length=2)),
|
||||
('is_finished', models.BooleanField(default=False)),
|
||||
('has_replay', models.BooleanField(default=False, verbose_name='Replay')),
|
||||
('has_command', models.BooleanField(default=False, verbose_name='Command')),
|
||||
('date_start', models.DateTimeField(verbose_name='Date start')),
|
||||
('date_end', models.DateTimeField(null=True, verbose_name='Date end')),
|
||||
],
|
||||
options={
|
||||
'db_table': 'terminal_session',
|
||||
'ordering': ['-date_start'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Status',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('session_online', models.IntegerField(default=0, verbose_name='Session Online')),
|
||||
('cpu_used', models.FloatField(verbose_name='CPU Usage')),
|
||||
('memory_used', models.FloatField(verbose_name='Memory Used')),
|
||||
('connections', models.IntegerField(verbose_name='Connections')),
|
||||
('threads', models.IntegerField(verbose_name='Threads')),
|
||||
('boot_time', models.FloatField(verbose_name='Boot Time')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'terminal_status',
|
||||
'get_latest_by': 'date_created',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Task',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(choices=[('kill_session', 'Kill Session')], max_length=128, verbose_name='Name')),
|
||||
('args', models.CharField(max_length=1024, verbose_name='Args')),
|
||||
('is_finished', models.BooleanField(default=False)),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('date_finished', models.DateTimeField(null=True)),
|
||||
],
|
||||
options={
|
||||
'db_table': 'terminal_task',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Terminal',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=32, unique=True, verbose_name='Name')),
|
||||
('remote_addr', models.CharField(max_length=128, verbose_name='Remote Address')),
|
||||
('ssh_port', models.IntegerField(default=2222, verbose_name='SSH Port')),
|
||||
('http_port', models.IntegerField(default=5000, verbose_name='HTTP Port')),
|
||||
('is_accepted', models.BooleanField(default=False, verbose_name='Is Accepted')),
|
||||
('is_deleted', models.BooleanField(default=False)),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
],
|
||||
options={
|
||||
'db_table': 'terminal',
|
||||
'ordering': ('is_accepted',),
|
||||
},
|
||||
),
|
||||
]
|
|
@ -11,7 +11,7 @@ from .backends.command.models import AbstractSessionCommand
|
|||
|
||||
class Terminal(models.Model):
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
name = models.CharField(max_length=32, unique=True, verbose_name=_('Name'))
|
||||
name = models.CharField(max_length=32, verbose_name=_('Name'))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_('Remote Address'))
|
||||
ssh_port = models.IntegerField(verbose_name=_('SSH Port'), default=2222)
|
||||
http_port = models.IntegerField(verbose_name=_('HTTP Port'), default=5000)
|
||||
|
@ -34,7 +34,8 @@ class Terminal(models.Model):
|
|||
self.user.save()
|
||||
|
||||
def create_app_user(self):
|
||||
user, access_key = User.create_app_user(name=self.name, comment=self.comment)
|
||||
random = uuid.uuid4().hex[:6]
|
||||
user, access_key = User.create_app_user(name="{}-{}".format(self.name, random), comment=self.comment)
|
||||
self.user = user
|
||||
self.save()
|
||||
return user, access_key
|
||||
|
@ -42,6 +43,7 @@ class Terminal(models.Model):
|
|||
def delete(self, using=None, keep_parents=False):
|
||||
if self.user:
|
||||
self.user.delete()
|
||||
self.user = None
|
||||
self.is_deleted = True
|
||||
self.save()
|
||||
return
|
||||
|
|
|
@ -14,8 +14,11 @@ class TerminalSerializer(serializers.ModelSerializer):
|
|||
|
||||
class Meta:
|
||||
model = Terminal
|
||||
fields = ['id', 'name', 'remote_addr', 'http_port', 'ssh_port',
|
||||
'comment', 'is_accepted', 'session_online', 'is_alive']
|
||||
fields = [
|
||||
'id', 'name', 'remote_addr', 'http_port', 'ssh_port',
|
||||
'comment', 'is_accepted', "is_active", 'session_online',
|
||||
'is_alive'
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def get_session_online(obj):
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
{% block custom_head_css_js %}
|
||||
<link href="{% static "css/plugins/footable/footable.core.css" %}" rel="stylesheet">
|
||||
<link href="{% static 'css/plugins/datepicker/datepicker3.css' %}" rel="stylesheet">
|
||||
<link href="{% static 'css/plugins/select2/select2.min.css' %}" rel="stylesheet">
|
||||
<script src="{% static 'js/plugins/select2/select2.full.min.js' %}"></script>
|
||||
<style>
|
||||
#search_btn {
|
||||
margin-bottom: 0;
|
||||
|
@ -20,9 +22,9 @@
|
|||
<div class="form-group" id="date">
|
||||
<div class="input-daterange input-group" id="datepicker">
|
||||
<span class="input-group-addon"><i class="fa fa-calendar"></i></span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from|date:"m/d/Y" }}">
|
||||
<span class="input-group-addon">to</span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to|date:"m/d/Y" }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="input-group">
|
||||
|
@ -93,20 +95,23 @@
|
|||
{% endblock %}
|
||||
|
||||
{% block custom_foot_js %}
|
||||
<script src="{% static "js/plugins/footable/footable.all.min.js" %}"></script>
|
||||
<script src="{% static 'js/plugins/datepicker/bootstrap-datepicker.js' %}"></script>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.footable').footable();
|
||||
$('.select2').select2();
|
||||
$('#date .input-daterange').datepicker({
|
||||
dateFormat: 'mm/dd/yy',
|
||||
keyboardNavigation: false,
|
||||
forceParse: false,
|
||||
autoclose: true
|
||||
});
|
||||
<script src="{% static "js/plugins/footable/footable.all.min.js" %}"></script>
|
||||
<script src="{% static 'js/plugins/datepicker/bootstrap-datepicker.js' %}"></script>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.footable').footable();
|
||||
$('.select2').select2({
|
||||
dropdownAutoWidth : true,
|
||||
width: 'auto'
|
||||
});
|
||||
</script>
|
||||
$('#date .input-daterange').datepicker({
|
||||
dateFormat: 'mm/dd/yy',
|
||||
keyboardNavigation: false,
|
||||
forceParse: false,
|
||||
autoclose: true
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
||||
|
||||
|
|
|
@ -4,6 +4,8 @@
|
|||
{% load terminal_tags %}
|
||||
{% block custom_head_css_js %}
|
||||
<link href="{% static 'css/plugins/datepicker/datepicker3.css' %}" rel="stylesheet">
|
||||
<link href="{% static "css/plugins/select2/select2.min.css" %}" rel="stylesheet">
|
||||
<script src="{% static "js/plugins/select2/select2.full.min.js" %}"></script>
|
||||
<style>
|
||||
#search_btn {
|
||||
margin-bottom: 0;
|
||||
|
@ -20,9 +22,9 @@
|
|||
<div class="form-group" id="date">
|
||||
<div class="input-daterange input-group" id="datepicker">
|
||||
<span class="input-group-addon"><i class="fa fa-calendar"></i></span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from|date:"m/d/Y" }}">
|
||||
<span class="input-group-addon">to</span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to|date:"m/d/Y" }}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="input-group">
|
||||
|
@ -129,7 +131,8 @@
|
|||
"order": []
|
||||
});
|
||||
$('.select2').select2({
|
||||
dropdownAutoWidth: true
|
||||
dropdownAutoWidth: true,
|
||||
width: "auto"
|
||||
});
|
||||
$('#date .input-daterange').datepicker({
|
||||
dateFormat: 'mm/dd/yy',
|
||||
|
|
|
@ -89,7 +89,7 @@ function initTable() {
|
|||
],
|
||||
ajax_url: '{% url "api-terminal:terminal-list" %}',
|
||||
columns: [{data: function(){return ""}}, {data: "name" }, {data: "remote_addr" }, {data: "ssh_port"}, {data: "http_port"},
|
||||
{data: "session_online"}, {data: "is_accepted" }, {data: 'is_alive'}, {data: "id"}],
|
||||
{data: "session_online"}, {data: "is_active" }, {data: 'is_alive'}, {data: "id"}],
|
||||
op_html: $('#actions').html()
|
||||
};
|
||||
jumpserver.initDataTable(options);
|
||||
|
|
|
@ -57,7 +57,7 @@
|
|||
<script src="{% static 'js/plugins/datepicker/bootstrap-datepicker.js' %}"></script>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
|
||||
$('.input-group.date').datepicker({
|
||||
format: "yyyy-mm-dd",
|
||||
|
|
|
@ -20,6 +20,7 @@ urlpatterns = [
|
|||
url(r'^v1/sessions/(?P<pk>[0-9a-zA-Z\-]{36})/replay/$',
|
||||
api.SessionReplayViewSet.as_view({'get': 'retrieve', 'post': 'create'}),
|
||||
name='session-replay'),
|
||||
url(r'^v1/terminal/(?P<terminal>[a-zA-Z0-9\-]{36})/access-key', api.TerminalTokenApi.as_view(), name='terminal-access-key')
|
||||
]
|
||||
|
||||
urlpatterns += router.urls
|
||||
|
|
|
@ -15,6 +15,7 @@ urlpatterns = [
|
|||
url(r'^terminal/(?P<pk>[0-9a-zA-Z\-]{36})/connect/$', views.TerminalConnectView.as_view(), name='terminal-connect'),
|
||||
url(r'^terminal/(?P<pk>[0-9a-zA-Z\-]{36})/update/$', views.TerminalUpdateView.as_view(), name='terminal-update'),
|
||||
url(r'^(?P<pk>[0-9a-zA-Z\-]{36})/accept/$', views.TerminalAcceptView.as_view(), name='terminal-accept'),
|
||||
url(r'^web-terminal/$', views.WebTerminalView.as_view(), name='web-terminal'),
|
||||
|
||||
# Session view
|
||||
url(r'^session-online/$', views.SessionOnlineListView.as_view(), name='session-online-list'),
|
||||
|
|
|
@ -18,8 +18,6 @@ def get_session_system_user_list():
|
|||
return set(list(Session.objects.values_list('system_user', flat=True)))
|
||||
|
||||
|
||||
|
||||
|
||||
def get_user_list_from_cache():
|
||||
return cache.get(USERS_CACHE_KEY)
|
||||
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from datetime import datetime
|
||||
|
||||
from django.views.generic import ListView
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from common.mixins import DatetimeSearchMixin
|
||||
from ..models import Command
|
||||
from .. import utils
|
||||
from ..backends import get_command_store
|
||||
|
@ -15,39 +15,19 @@ __all__ = ['CommandListView']
|
|||
command_store = get_command_store()
|
||||
|
||||
|
||||
class CommandListView(ListView):
|
||||
class CommandListView(DatetimeSearchMixin, ListView):
|
||||
model = Command
|
||||
template_name = "terminal/command_list.html"
|
||||
context_object_name = 'command_list'
|
||||
paginate_by = settings.CONFIG.DISPLAY_PER_PAGE
|
||||
command = user = asset = system_user = date_from_s = date_to_s = ''
|
||||
command = user = asset = system_user = ""
|
||||
date_from = date_to = None
|
||||
date_format = '%m/%d/%Y'
|
||||
|
||||
def get_queryset(self):
|
||||
date_to_default = timezone.now()
|
||||
date_from_default = timezone.now() - timezone.timedelta(7)
|
||||
date_to_default_s = date_to_default.strftime(self.date_format)
|
||||
date_from_default_s = date_from_default.strftime(self.date_format)
|
||||
|
||||
self.command = self.request.GET.get('command', '')
|
||||
self.user = self.request.GET.get('user')
|
||||
self.asset = self.request.GET.get('asset')
|
||||
self.system_user = self.request.GET.get('system_user')
|
||||
self.date_from_s = self.request.GET.get('date_from', date_from_default_s)
|
||||
self.date_to_s = self.request.GET.get('date_to', date_to_default_s)
|
||||
|
||||
filter_kwargs = {}
|
||||
if self.date_from_s:
|
||||
date_from = datetime.strptime(self.date_from_s, self.date_format)
|
||||
date_from = date_from.replace(
|
||||
tzinfo=timezone.get_current_timezone()
|
||||
)
|
||||
filter_kwargs['date_from'] = date_from
|
||||
if self.date_to_s:
|
||||
date_to = timezone.datetime.strptime(
|
||||
self.date_to_s + ' 23:59:59', '%m/%d/%Y %H:%M:%S')
|
||||
date_to = date_to.replace(tzinfo=timezone.get_current_timezone())
|
||||
filter_kwargs['date_to'] = date_to
|
||||
filter_kwargs = dict()
|
||||
filter_kwargs['date_from'] = self.date_from
|
||||
filter_kwargs['date_to'] = self.date_to
|
||||
if self.user:
|
||||
filter_kwargs['user'] = self.user
|
||||
if self.asset:
|
||||
|
@ -68,8 +48,8 @@ class CommandListView(ListView):
|
|||
'asset_list': utils.get_asset_list_from_cache(),
|
||||
'system_user_list': utils.get_system_user_list_from_cache(),
|
||||
'command': self.command,
|
||||
'date_from': self.date_from_s,
|
||||
'date_to': self.date_to_s,
|
||||
'date_from': self.date_from,
|
||||
'date_to': self.date_to,
|
||||
'username': self.user,
|
||||
'asset': self.asset,
|
||||
'system_user': self.system_user,
|
||||
|
|
|
@ -1,20 +1,14 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from django.views.generic import ListView, UpdateView, DeleteView, DetailView, TemplateView
|
||||
from django.views.generic.edit import SingleObjectMixin
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.utils import timezone
|
||||
from django.utils.module_loading import import_string
|
||||
from django.urls import reverse_lazy
|
||||
from django.http import HttpResponse
|
||||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
|
||||
from users.utils import AdminUserRequiredMixin
|
||||
from common.mixins import DatetimeSearchMixin
|
||||
from ..models import Session, Command, Terminal
|
||||
from ..backends import get_command_store
|
||||
from .. import utils
|
||||
|
@ -28,37 +22,24 @@ __all__ = [
|
|||
command_store = get_command_store()
|
||||
|
||||
|
||||
class SessionListView(AdminUserRequiredMixin, ListView):
|
||||
class SessionListView(AdminUserRequiredMixin, DatetimeSearchMixin, ListView):
|
||||
model = Session
|
||||
template_name = 'terminal/session_list.html'
|
||||
context_object_name = 'session_list'
|
||||
paginate_by = settings.CONFIG.DISPLAY_PER_PAGE
|
||||
user = asset = system_user = date_from_s = date_to_s = ''
|
||||
user = asset = system_user = ''
|
||||
date_from = date_to = None
|
||||
date_format = '%m/%d/%Y'
|
||||
|
||||
def get_queryset(self):
|
||||
date_to_default = timezone.now()
|
||||
date_from_default = timezone.now() - timezone.timedelta(7)
|
||||
date_to_default_s = date_to_default.strftime(self.date_format)
|
||||
date_from_default_s = date_from_default.strftime(self.date_format)
|
||||
|
||||
self.queryset = super().get_queryset()
|
||||
self.user = self.request.GET.get('user')
|
||||
self.asset = self.request.GET.get('asset')
|
||||
self.system_user = self.request.GET.get('system_user')
|
||||
self.date_from_s = self.request.GET.get('date_from', date_from_default_s)
|
||||
self.date_to_s = self.request.GET.get('date_to', date_to_default_s)
|
||||
|
||||
filter_kwargs = {}
|
||||
if self.date_from_s:
|
||||
date_from = datetime.strptime(self.date_from_s, self.date_format)
|
||||
date_from = date_from.replace(tzinfo=timezone.get_current_timezone())
|
||||
filter_kwargs['date_start__gt'] = date_from
|
||||
if self.date_to_s:
|
||||
date_to = timezone.datetime.strptime(
|
||||
self.date_to_s + ' 23:59:59', '%m/%d/%Y %H:%M:%S')
|
||||
date_to = date_to.replace(tzinfo=timezone.get_current_timezone())
|
||||
filter_kwargs['date_start__lt'] = date_to
|
||||
filter_kwargs = dict()
|
||||
filter_kwargs['date_start__gt'] = self.date_from
|
||||
filter_kwargs['date_start__lt'] = self.date_to
|
||||
if self.user:
|
||||
filter_kwargs['user'] = self.user
|
||||
if self.asset:
|
||||
|
@ -76,8 +57,8 @@ class SessionListView(AdminUserRequiredMixin, ListView):
|
|||
'user_list': utils.get_user_list_from_cache(),
|
||||
'asset_list': utils.get_asset_list_from_cache(),
|
||||
'system_user_list': utils.get_system_user_list_from_cache(),
|
||||
'date_from': self.date_from_s,
|
||||
'date_to': self.date_to_s,
|
||||
'date_from': self.date_from,
|
||||
'date_to': self.date_to,
|
||||
'username': self.user,
|
||||
'asset': self.asset,
|
||||
'system_user': self.system_user,
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
# ~*~ coding: utf-8 ~*~
|
||||
#
|
||||
|
||||
from django.views.generic import ListView, UpdateView, DeleteView, \
|
||||
DetailView, TemplateView
|
||||
DetailView, View
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.shortcuts import redirect
|
||||
from django.urls import reverse_lazy, reverse
|
||||
|
||||
from common.mixins import JSONResponseMixin
|
||||
|
@ -16,6 +16,7 @@ from ..hands import AdminUserRequiredMixin
|
|||
__all__ = [
|
||||
"TerminalListView", "TerminalUpdateView", "TerminalDetailView",
|
||||
"TerminalDeleteView", "TerminalConnectView", "TerminalAcceptView",
|
||||
"WebTerminalView",
|
||||
]
|
||||
|
||||
|
||||
|
@ -73,6 +74,7 @@ class TerminalAcceptView(AdminUserRequiredMixin, JSONResponseMixin, UpdateView):
|
|||
|
||||
def form_valid(self, form):
|
||||
terminal = form.save()
|
||||
terminal.create_app_user()
|
||||
terminal.is_accepted = True
|
||||
terminal.is_active = True
|
||||
terminal.save()
|
||||
|
@ -114,3 +116,8 @@ class TerminalConnectView(LoginRequiredMixin, DetailView):
|
|||
|
||||
kwargs.update(context)
|
||||
return super(TerminalConnectView, self).get_context_data(**kwargs)
|
||||
|
||||
|
||||
class WebTerminalView(LoginRequiredMixin, View):
|
||||
def get(self, request, *args, **kwargs):
|
||||
return redirect('/luna/?' + request.GET.urlencode())
|
||||
|
|
|
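
WebTerminalView above simply forwards the browser to the Luna front end while preserving the original query string. A quick illustration of what request.GET.urlencode() contributes; the parameter names and values are invented for the example:

from django.http import QueryDict

# Hypothetical query string the view might receive
params = QueryDict('login_to=web01&system_user=root')
print('/luna/?' + params.urlencode())
# -> /luna/?login_to=web01&system_user=root
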
@ -0,0 +1,136 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11 on 2017-12-21 16:06
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import common.utils
|
||||
from django.contrib.auth.hashers import make_password
|
||||
from django.conf import settings
|
||||
import django.contrib.auth.models
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import uuid
|
||||
|
||||
|
||||
def add_default_group(apps, schema_editor):
|
||||
group_model = apps.get_model("users", "UserGroup")
|
||||
db_alias = schema_editor.connection.alias
|
||||
group_model.objects.using(db_alias).create(
|
||||
name="Default"
|
||||
)
|
||||
|
||||
|
||||
def add_default_admin(apps, schema_editor):
|
||||
user_model = apps.get_model("users", "User")
|
||||
db_alias = schema_editor.connection.alias
|
||||
admin = user_model.objects.using(db_alias).create(
|
||||
username="admin", name="Administrator",
|
||||
email="admin@mycompany.com", role="Admin",
email="admin@mycompany.com", role="Admin",
|
||||
password=make_password("admin"),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('auth', '0008_alter_user_username_max_length'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='User',
|
||||
fields=[
|
||||
('password', models.CharField(max_length=128, verbose_name='password')),
|
||||
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
|
||||
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
|
||||
('last_name', models.CharField(blank=True, max_length=30, verbose_name='last name')),
|
||||
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
|
||||
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('username', models.CharField(max_length=20, unique=True, verbose_name='Username')),
|
||||
('name', models.CharField(max_length=20, verbose_name='Name')),
|
||||
('email', models.EmailField(max_length=30, unique=True, verbose_name='Email')),
|
||||
('role', models.CharField(blank=True, choices=[('Admin', 'Administrator'), ('User', 'User'), ('App', 'Application')], default='User', max_length=10, verbose_name='Role')),
|
||||
('avatar', models.ImageField(null=True, upload_to='avatar', verbose_name='Avatar')),
|
||||
('wechat', models.CharField(blank=True, max_length=30, verbose_name='Wechat')),
|
||||
('phone', models.CharField(blank=True, max_length=20, null=True, verbose_name='Phone')),
|
||||
('enable_otp', models.BooleanField(default=False, verbose_name='Enable OTP')),
|
||||
('secret_key_otp', models.CharField(blank=True, max_length=16)),
|
||||
('_private_key', models.CharField(blank=True, max_length=5000, verbose_name='Private key')),
|
||||
('_public_key', models.CharField(blank=True, max_length=5000, verbose_name='Public key')),
|
||||
('comment', models.TextField(blank=True, max_length=200, verbose_name='Comment')),
|
||||
('is_first_login', models.BooleanField(default=False)),
|
||||
('date_expired', models.DateTimeField(blank=True, default=common.utils.date_expired_default, null=True, verbose_name='Date expired')),
|
||||
('created_by', models.CharField(default='', max_length=30, verbose_name='Created by')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['username'],
|
||||
},
|
||||
managers=[
|
||||
('objects', django.contrib.auth.models.UserManager()),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AccessKey',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='AccessKeyID')),
|
||||
('secret', models.UUIDField(default=uuid.uuid4, editable=False, verbose_name='AccessKeySecret')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='access_key', to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='LoginLog',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('username', models.CharField(max_length=20, verbose_name='Username')),
|
||||
('type', models.CharField(choices=[('W', 'Web'), ('T', 'Terminal')], max_length=2, verbose_name='Login type')),
|
||||
('ip', models.GenericIPAddressField(verbose_name='Login ip')),
|
||||
('city', models.CharField(blank=True, max_length=254, null=True, verbose_name='Login city')),
|
||||
('user_agent', models.CharField(blank=True, max_length=254, null=True, verbose_name='User agent')),
|
||||
('datetime', models.DateTimeField(auto_now_add=True, verbose_name='Date login')),
|
||||
],
|
||||
options={
|
||||
'ordering': ['-datetime', 'username'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='PrivateToken',
|
||||
fields=[
|
||||
('key', models.CharField(max_length=40, primary_key=True, serialize=False, verbose_name='Key')),
|
||||
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
|
||||
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='auth_token', to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Private Token',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='UserGroup',
|
||||
fields=[
|
||||
('is_discard', models.BooleanField(default=False, verbose_name='is discard')),
|
||||
('discard_time', models.DateTimeField(blank=True, null=True, verbose_name='discard time')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('created_by', models.CharField(max_length=100)),
|
||||
],
|
||||
options={
|
||||
'ordering': ['name'],
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='groups',
|
||||
field=models.ManyToManyField(blank=True, related_name='users', to='users.UserGroup', verbose_name='User group'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='user',
|
||||
name='user_permissions',
|
||||
field=models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions'),
|
||||
),
|
||||
migrations.RunPython(add_default_group),
|
||||
migrations.RunPython(add_default_admin),
|
||||
]
|
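
The two RunPython operations above have no reverse callable, so this initial migration cannot be unapplied. A hedged sketch, not part of the commit, of the same operations with Django's built-in no-op reverse, assuming the add_default_group and add_default_admin helpers defined earlier in this migration:

# Sketch only: the same data migrations with a reverse step, so
# `manage.py migrate users zero` can roll them back cleanly.
operations = [
    # ... schema operations as above ...
    migrations.RunPython(add_default_group, reverse_code=migrations.RunPython.noop),
    migrations.RunPython(add_default_admin, reverse_code=migrations.RunPython.noop),
]
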
|
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11 on 2017-12-25 03:57
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('users', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='email',
|
||||
field=models.EmailField(max_length=128, unique=True, verbose_name='Email'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='name',
|
||||
field=models.CharField(max_length=128, verbose_name='Name'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='username',
|
||||
field=models.CharField(max_length=128, unique=True, verbose_name='Username'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='user',
|
||||
name='wechat',
|
||||
field=models.CharField(blank=True, max_length=128, verbose_name='Wechat'),
|
||||
),
|
||||
]
|
|
@ -1,16 +1,9 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import uuid
|
||||
|
||||
from django.db import models, IntegrityError
|
||||
from django.contrib.auth.models import Group
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import signer, date_expired_default
|
||||
from common.mixins import NoDeleteModelMixin
|
||||
|
||||
__all__ = ['UserGroup']
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import os
|
||||
import uuid
|
||||
from collections import OrderedDict
|
||||
|
||||
|
@ -15,10 +14,11 @@ from django.utils import timezone
|
|||
from django.shortcuts import reverse
|
||||
|
||||
from .group import UserGroup
|
||||
from common.utils import signer, date_expired_default
|
||||
from common.utils import get_signer, date_expired_default
|
||||
|
||||
|
||||
__all__ = ['User']
|
||||
signer = get_signer()
|
||||
|
||||
|
||||
class User(AbstractUser):
|
||||
|
@ -28,13 +28,13 @@ class User(AbstractUser):
|
|||
('App', 'Application')
|
||||
)
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
username = models.CharField(max_length=20, unique=True, verbose_name=_('Username'))
|
||||
name = models.CharField(max_length=20, verbose_name=_('Name'))
|
||||
email = models.EmailField(max_length=30, unique=True, verbose_name=_('Email'))
|
||||
username = models.CharField(max_length=128, unique=True, verbose_name=_('Username'))
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
email = models.EmailField(max_length=128, unique=True, verbose_name=_('Email'))
|
||||
groups = models.ManyToManyField(UserGroup, related_name='users', blank=True, verbose_name=_('User group'))
|
||||
role = models.CharField(choices=ROLE_CHOICES, default='User', max_length=10, blank=True, verbose_name=_('Role'))
|
||||
avatar = models.ImageField(upload_to="avatar", null=True, verbose_name=_('Avatar'))
|
||||
wechat = models.CharField(max_length=30, blank=True, verbose_name=_('Wechat'))
|
||||
wechat = models.CharField(max_length=128, blank=True, verbose_name=_('Wechat'))
|
||||
phone = models.CharField(max_length=20, blank=True, null=True, verbose_name=_('Phone'))
|
||||
enable_otp = models.BooleanField(default=False, verbose_name=_('Enable OTP'))
|
||||
secret_key_otp = models.CharField(max_length=16, blank=True)
|
||||
|
@ -212,7 +212,7 @@ class User(AbstractUser):
|
|||
def create_app_user(cls, name, comment):
|
||||
from . import AccessKey
|
||||
app = cls.objects.create(
|
||||
username=name, name=name, email='%s@local.domain'.format(),
|
||||
username=name, name=name, email='{}@local.domain'.format(name),
|
||||
is_active=False, role='App', enable_otp=False, comment=comment,
|
||||
is_first_login=False, created_by='System'
|
||||
)
|
||||
|
|
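
The hunk above fixes the malformed '%s@local.domain'.format() call, which returned the literal string '%s@local.domain' for every application user. A hedged usage sketch of the classmethod as shown; the terminal name is invented and the method's return value is not visible in this hunk:

# Sketch only: calling the fixed classmethod.
User.create_app_user(name='coco', comment='Application user for the coco terminal')
# The user created inside the method now gets the email 'coco@local.domain'
# instead of the literal '%s@local.domain' the old format call produced.
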
|
@ -5,10 +5,12 @@ from django.utils.translation import ugettext_lazy as _
|
|||
from rest_framework import serializers
|
||||
from rest_framework_bulk import BulkListSerializer
|
||||
|
||||
from common.utils import signer, validate_ssh_public_key
|
||||
from common.utils import get_signer, validate_ssh_public_key
|
||||
from common.mixins import BulkSerializerMixin
|
||||
from .models import User, UserGroup
|
||||
|
||||
signer = get_signer()
|
||||
|
||||
|
||||
class UserSerializer(BulkSerializerMixin, serializers.ModelSerializer):
|
||||
groups_display = serializers.SerializerMethodField()
|
||||
|
|
|
@ -56,7 +56,7 @@
|
|||
<script src="{% static 'js/plugins/datepicker/bootstrap-datepicker.js' %}"></script>
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
|
||||
$('.input-group.date').datepicker({
|
||||
format: "yyyy-mm-dd",
|
||||
|
|
|
@ -17,9 +17,10 @@
|
|||
<div class="form-group" id="date">
|
||||
<div class="input-daterange input-group" id="datepicker">
|
||||
<span class="input-group-addon"><i class="fa fa-calendar"></i></span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" value="{{ date_from|date:"m/d/Y"}}">
|
||||
{# <input type="text" class="input-sm form-control" style="width: 100px;" name="date_from" >#}
|
||||
<span class="input-group-addon">to</span>
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to }}">
|
||||
<input type="text" class="input-sm form-control" style="width: 100px;" name="date_to" value="{{ date_to|date:"m/d/Y"}}">
|
||||
</div>
|
||||
</div>
|
||||
<div class="input-group">
|
||||
|
@ -80,13 +81,15 @@
|
|||
"order": []
|
||||
});
|
||||
$('#date .input-daterange').datepicker({
|
||||
dateFormat: 'mm/dd/yy',
|
||||
dateFormat: "mm/dd/yyy",
|
||||
keyboardNavigation: false,
|
||||
forceParse: false,
|
||||
autoclose: true
|
||||
|
||||
});
|
||||
$('.select2').select2({
|
||||
dropdownAutoWidth: true
|
||||
dropdownAutoWidth: true,
|
||||
width: 'auto'
|
||||
});
|
||||
})
|
||||
</script>
|
||||
|
|
|
@ -30,7 +30,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
}).on('click', '.field-tag', function() {
|
||||
changeField(this);
|
||||
}).on('click', '#change_all', function () {
|
||||
|
|
|
@ -253,7 +253,7 @@ function updateUserGroups(groups) {
|
|||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
$('.select2').select2()
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
.on('select2:select', function(evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.groups_selected[data.id] = data.text;
|
||||
|
|
|
@ -98,7 +98,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
var options = {
|
||||
ele: $('#user_assets_table'),
|
||||
buttons: [],
|
||||
|
|
|
@ -57,7 +57,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
})
|
||||
</script>
|
||||
{% endblock %}
|
||||
|
|
|
@ -150,7 +150,7 @@ function updateGroupMember(users) {
|
|||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2()
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
.on('select2:select', function(evt) {
|
||||
var data = evt.params.data;
|
||||
jumpserver.users_selected[data.id] = data.text;
|
||||
|
|
|
@ -102,7 +102,7 @@
|
|||
{% block custom_foot_js %}
|
||||
<script>
|
||||
$(document).ready(function () {
|
||||
$('.select2').select2();
|
||||
$('.select2').select2({ dropdownAutoWidth : true, width: 'auto' });
|
||||
var options = {
|
||||
ele: $('#user_assets_table'),
|
||||
buttons: [],
|
||||
|
|
|
@ -22,6 +22,7 @@ from django.conf import settings
|
|||
from django.utils import timezone
|
||||
|
||||
from common.utils import get_object_or_none
|
||||
from common.mixins import DatetimeSearchMixin
|
||||
from ..models import User, LoginLog
|
||||
from ..utils import send_reset_password_mail
|
||||
from ..tasks import write_login_log_async
|
||||
|
@ -210,55 +211,38 @@ class UserFirstLoginView(LoginRequiredMixin, SessionWizardView):
|
|||
return form
|
||||
|
||||
|
||||
class LoginLogListView(ListView):
|
||||
class LoginLogListView(DatetimeSearchMixin, ListView):
|
||||
template_name = 'users/login_log_list.html'
|
||||
model = LoginLog
|
||||
paginate_by = settings.CONFIG.DISPLAY_PER_PAGE
|
||||
username = keyword = date_from_s = date_to_s = ""
|
||||
username = keyword = ""
|
||||
date_to = date_from = None
|
||||
date_format = '%m/%d/%Y'
|
||||
|
||||
def get_queryset(self):
|
||||
date_to_default = timezone.now()
|
||||
date_from_default = timezone.now() - timezone.timedelta(7)
|
||||
date_to_default_s = date_to_default.strftime(self.date_format)
|
||||
date_from_default_s = date_from_default.strftime(self.date_format)
|
||||
|
||||
self.username = self.request.GET.get('username', '')
|
||||
self.keyword = self.request.GET.get("keyword", '')
|
||||
self.date_from_s = self.request.GET.get('date_from', date_from_default_s)
|
||||
self.date_to_s = self.request.GET.get('date_to', date_to_default_s)
|
||||
|
||||
self.queryset = super().get_queryset()
|
||||
queryset = super().get_queryset()
|
||||
queryset = queryset.filter(
|
||||
datetime__gt=self.date_from, datetime__lt=self.date_to
|
||||
)
|
||||
if self.username:
|
||||
self.queryset = self.queryset.filter(username=self.username)
|
||||
if self.date_from_s:
|
||||
date_from = timezone.datetime.strptime(self.date_from_s, '%m/%d/%Y')
|
||||
date_from = date_from.replace(
|
||||
tzinfo=timezone.get_current_timezone()
|
||||
)
|
||||
self.queryset = self.queryset.filter(datetime__gt=date_from)
|
||||
if self.date_to_s:
|
||||
date_to = timezone.datetime.strptime(
|
||||
self.date_to_s + ' 23:59:59', '%m/%d/%Y %H:%M:%S'
|
||||
)
|
||||
date_to = date_to.replace(
|
||||
tzinfo=timezone.get_current_timezone()
|
||||
)
|
||||
self.queryset = self.queryset.filter(datetime__lt=date_to)
|
||||
queryset = queryset.filter(username=self.username)
|
||||
if self.keyword:
|
||||
self.queryset = self.queryset.filter(
|
||||
queryset = queryset.filter(
|
||||
Q(ip__contains=self.keyword) |
|
||||
Q(city__contains=self.keyword) |
|
||||
Q(username__contains=self.keyword)
|
||||
)
|
||||
return self.queryset
|
||||
return queryset
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = {
|
||||
'app': _('Users'),
|
||||
'action': _('Login log list'),
|
||||
'date_from': self.date_from_s,
|
||||
'date_to': self.date_to_s,
|
||||
'date_from': self.date_from,
|
||||
'date_to': self.date_to,
|
||||
'username': self.username,
|
||||
'keyword': self.keyword,
|
||||
'user_list': set(LoginLog.objects.all().values_list('username', flat=True))
|
||||
|
|
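
The keyword branch above ORs three case-sensitive contains lookups across ip, city and username. The same filter written standalone; the search term is invented and the import path is assumed from the `from ..models import User, LoginLog` line earlier in this file:

from django.db.models import Q

from users.models import LoginLog   # import path assumed for the standalone example

keyword = '10.1.'   # hypothetical search term
logs = LoginLog.objects.filter(
    Q(ip__contains=keyword) |
    Q(city__contains=keyword) |
    Q(username__contains=keyword)
)
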
|
@ -4,7 +4,7 @@
|
|||
|
||||
Jumpserver project setting file
|
||||
|
||||
:copyright: (c) 2014-2016 by Jumpserver Team.
|
||||
:copyright: (c) 2014-2017 by Jumpserver Team
|
||||
:license: GPL v2, see LICENSE for more details.
|
||||
"""
|
||||
import os
|
||||
|
@ -50,6 +50,11 @@ class Config:
|
|||
# DB_PASSWORD = ''
|
||||
# DB_NAME = 'jumpserver'
|
||||
|
||||
# When Django starts it will bind to this host and port
|
||||
# ./manage.py runserver 127.0.0.1:8080
|
||||
HTTP_BIND_HOST = '0.0.0.0'
|
||||
HTTP_LISTEN_PORT = 8080
|
||||
|
||||
# Use Redis as broker for celery and web socket
|
||||
REDIS_HOST = '127.0.0.1'
|
||||
REDIS_PORT = 6379
|
||||
|
@ -101,8 +106,18 @@ class Config:
|
|||
return None
|
||||
|
||||
|
||||
config = {
|
||||
'default': Config,
|
||||
}
|
||||
class DevelopmentConfig(Config):
|
||||
pass
|
||||
|
||||
|
||||
class TestConfig(Config):
|
||||
pass
|
||||
|
||||
|
||||
class ProductionConfig(Config):
|
||||
pass
|
||||
|
||||
|
||||
# Use Config by default; add if/else here to pick a class per environment
|
||||
config = Config()
|
||||
|
||||
env = 'default'
|
||||
|
|
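
The comment above suggests selecting a different config class per environment instead of always instantiating the base Config. A hedged sketch of one way to do that with the classes defined in this hunk; JMS_ENV is an invented environment-variable name for illustration:

import os

# Sketch only: choose the config class from an environment variable rather
# than always instantiating the base Config.
configs = {
    'default': Config,
    'development': DevelopmentConfig,
    'test': TestConfig,
    'production': ProductionConfig,
}
env = os.environ.get('JMS_ENV', 'default')
config = configs.get(env, Config)()
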
|
@ -1,9 +1,10 @@
|
|||
amqp==2.1.4
|
||||
six==1.11.0
|
||||
ansible==2.4.2.0
|
||||
asn1crypto==0.24.0
|
||||
bcrypt==3.1.4
|
||||
billiard==3.5.0.3
|
||||
celery==4.0.2
|
||||
celery==4.1.0
|
||||
certifi==2017.11.5
|
||||
cffi==1.11.2
|
||||
chardet==3.0.4
|
||||
|
@ -45,15 +46,16 @@ pycparser==2.18
|
|||
pycrypto==2.6.1
|
||||
pyldap==2.4.45
|
||||
PyNaCl==1.2.1
|
||||
python-gssapi==0.6.4
|
||||
pytz==2017.3
|
||||
PyYAML==3.12
|
||||
redis==2.10.6
|
||||
requests==2.18.4
|
||||
simplejson==3.13.2
|
||||
six==1.11.0
|
||||
sshpubkeys==2.2.0
|
||||
uritemplate==3.0.0
|
||||
urllib3==1.22
|
||||
vine==1.1.4
|
||||
gunicorn==19.7.1
|
||||
django_celery_beat==1.1.0
|
||||
ephem==3.7.6.0
|
||||
python-gssapi==0.6.4
|
||||
|
|
run_server.py
|
@ -1,55 +1,136 @@
|
|||
#!/usr/bin/env python
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
|
||||
from threading import Thread
|
||||
import os
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
import argparse
|
||||
import platform
|
||||
import sys
|
||||
|
||||
from apps import __version__
|
||||
|
||||
try:
|
||||
from config import config as env_config, env
|
||||
|
||||
CONFIG = env_config.get(env, 'default')()
|
||||
from config import config as CONFIG
|
||||
except ImportError:
|
||||
CONFIG = type('_', (), {'__getattr__': None})()
|
||||
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
APPS_DIR = os.path.join(BASE_DIR, 'apps')
|
||||
HTTP_HOST = CONFIG.HTTP_BIND_HOST or '127.0.0.1'
|
||||
HTTP_PORT = CONFIG.HTTP_LISTEN_PORT or 8080
|
||||
DEBUG = CONFIG.DEBUG
|
||||
LOG_LEVEL = CONFIG.LOG_LEVEL
|
||||
WORKERS = 4
|
||||
|
||||
apps_dir = os.path.join(BASE_DIR, 'apps')
|
||||
EXIT_EVENT = threading.Event()
|
||||
EXIT_MSGS = []
|
||||
|
||||
|
||||
def start_django():
|
||||
http_host = CONFIG.HTTP_BIND_HOST or '127.0.0.1'
|
||||
http_port = CONFIG.HTTP_LISTEN_PORT or '8080'
|
||||
os.chdir(apps_dir)
|
||||
print('start django')
|
||||
subprocess.call('python ./manage.py runserver %s:%s' % (http_host, http_port), shell=True)
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.join(BASE_DIR, "data", "static"))
|
||||
os.makedirs(os.path.join(BASE_DIR, "data", "media"))
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def make_migrations():
|
||||
print("Check database change, make migrations")
|
||||
os.chdir(os.path.join(BASE_DIR, 'apps'))
|
||||
subprocess.call('python manage.py migrate', shell=True)
|
||||
|
||||
|
||||
def collect_static():
|
||||
print("Collect static files")
|
||||
os.chdir(os.path.join(BASE_DIR, 'apps'))
|
||||
subprocess.call('python manage.py collectstatic --no-input', shell=True)
|
||||
|
||||
|
||||
def start_gunicorn():
|
||||
print("- Start Gunicorn WSGI HTTP Server")
|
||||
make_migrations()
|
||||
collect_static()
|
||||
os.chdir(APPS_DIR)
|
||||
cmd = "gunicorn jumpserver.wsgi -b {}:{} -w {}".format(HTTP_HOST, HTTP_PORT, WORKERS)
|
||||
if DEBUG:
|
||||
cmd += " --reload"
|
||||
p = subprocess.Popen(cmd, shell=True, stdout=sys.stdout, stderr=sys.stderr)
|
||||
return p
|
||||
|
||||
|
||||
def start_celery():
|
||||
os.chdir(apps_dir)
|
||||
os.environ.setdefault('C_FORCE_ROOT', '1')
|
||||
print("- Start Celery as Distributed Task Queue")
|
||||
os.chdir(APPS_DIR)
|
||||
# Todo: Must set this environment variable, otherwise Ansible returns no results
|
||||
os.environ.setdefault('PYTHONOPTIMIZE', '1')
|
||||
print('start celery')
|
||||
subprocess.call('celery -A common worker -B -s /tmp/celerybeat-schedule -l debug', shell=True)
|
||||
|
||||
cmd = """
|
||||
export C_FORCE_ROOT=1;celery -A common worker -l {}
|
||||
""".format(LOG_LEVEL.lower())
|
||||
|
||||
p = subprocess.Popen(cmd, shell=True, stdout=sys.stdout, stderr=sys.stderr)
|
||||
return p
|
||||
|
||||
|
||||
def main():
|
||||
t1 = Thread(target=start_django, args=())
|
||||
t2 = Thread(target=start_celery, args=())
|
||||
def start_beat():
|
||||
print("- Start Beat as Periodic Task Scheduler")
|
||||
os.chdir(APPS_DIR)
|
||||
os.environ.setdefault('PYTHONOPTIMIZE', '1')
|
||||
os.environ.setdefault('C_FORCE_ROOT', '1')
|
||||
pidfile = '/tmp/beat.pid'
|
||||
scheduler = "django_celery_beat.schedulers:DatabaseScheduler"
|
||||
options = "--pidfile {} -l {} --scheduler {} --max-interval 60".format(
|
||||
pidfile, LOG_LEVEL, scheduler,
|
||||
)
|
||||
cmd = 'celery -A common beat {} '.format(options)
|
||||
p = subprocess.Popen(cmd, shell=True, stdout=sys.stdout, stderr=sys.stderr)
|
||||
return p
|
||||
|
||||
t1.start()
|
||||
t2.start()
|
||||
|
||||
t1.join()
|
||||
t2.join()
|
||||
def start_service(services):
|
||||
print(time.ctime())
|
||||
print('Jumpserver version {}, for more see https://www.jumpserver.org'.format(
|
||||
__version__))
|
||||
print('Quit the server with CONTROL-C.')
|
||||
|
||||
processes = {}
|
||||
services_all = {
|
||||
"gunicorn": start_gunicorn,
|
||||
"celery": start_celery,
|
||||
"beat": start_beat
|
||||
}
|
||||
|
||||
if 'all' in services:
|
||||
for name, func in services_all.items():
|
||||
processes[name] = func()
|
||||
else:
|
||||
for name in services:
|
||||
func = services_all.get(name)
|
||||
processes[name] = func()
|
||||
|
||||
stop_event = threading.Event()
|
||||
while not stop_event.is_set():
|
||||
for name, proc in processes.items():
|
||||
if proc.poll() is not None:
|
||||
print("\n\n" + "####"*10 + " ERROR OCCUR " + "####"*10)
|
||||
print("Start service {} [FAILED]".format(name))
|
||||
for _, p in processes.items():
|
||||
p.terminate()
|
||||
stop_event.set()
|
||||
print("Exited".format(name))
|
||||
break
|
||||
time.sleep(5)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(description="Jumpserver start tools")
|
||||
parser.add_argument("services", type=str, nargs='+', default="all",
|
||||
choices=("all", "gunicorn", "celery", "beat"),
|
||||
help="The service to start",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
start_service(args.services)
|
||||
|
||||
|
||||
|
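
With the rewrite above, run_server.py takes the services to launch as positional arguments (choices: all, gunicorn, celery, beat) and supervises the resulting processes. Typical invocations, run from the repository root:

# python run_server.py all                 # gunicorn + celery worker + beat
# python run_server.py gunicorn celery     # start only a subset of services
# python run_server.py beat                # periodic task scheduler alone
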
|