add alicloud redis and redis metrics
parent 148ebdca88
commit 40c4e86364
@@ -19,9 +19,10 @@ huaweicloudsdkrds==3.1.11
 huaweicloudsdkces==3.1.11
 huaweicloudsdkdcs==3.1.11
 alibabacloud_resourcemanager20200331==2.1.1
-alibabacloud_ecs20140526==2.1.1
-alibabacloud_rds20140815==2.1.1
-alibabacloud_bssopenapi20171214==2.0.5
+alibabacloud_ecs20140526==2.1.3
+alibabacloud_rds20140815==2.1.2
+alibabacloud_r_kvstore20150101==2.20.7
+alibabacloud_bssopenapi20171214==2.0.6
 aliyun-python-sdk-cms==7.0.32
 tencentcloud-sdk-python-common==3.0.770
 tencentcloud-sdk-python-cvm==3.0.770
@@ -11,12 +11,12 @@ from alibabacloud_bssopenapi20171214.client import Client as BssOpenApi20171214C
 from alibabacloud_bssopenapi20171214 import models as bss_open_api_20171214_models
 from alibabacloud_rds20140815.client import Client as Rds20140815Client
 from alibabacloud_rds20140815 import models as rds_20140815_models
+from alibabacloud_r_kvstore20150101 import models as r_kvstore_20150101_models
+from alibabacloud_r_kvstore20150101.client import Client as R_kvstore20150101Client

 import sys,datetime,hashlib
 from units import consul_kv,consul_svc
-from units.cloud import sync_ecs
-from units.cloud import sync_rds
-from units.cloud import notify
+from units.cloud import sync_ecs,sync_rds,sync_redis,notify

 def exp(account,collect_days,notify_days,notify_amount):
     #print(f"=====【阿里云:余额与到期日统计开始:{account}】", flush=True)
@@ -178,6 +178,56 @@ def ecs(account,region,isextip=False):
         data = {'count':'无','update':f'失败','status':50000,'msg':str(e)}
         consul_kv.put_kv(f'ConsulManager/record/jobs/alicloud/{account}/ecs/{region}', data)

+def redis(account,region):
+    ak,sk = consul_kv.get_aksk('alicloud',account)
+    now = datetime.datetime.now().strftime('%m.%d/%H:%M')
+    group_dict = consul_kv.get_value(f'ConsulManager/assets/alicloud/group/{account}')
+
+    config = open_api_models.Config(access_key_id=ak,access_key_secret=sk)
+    config.endpoint = 'r-kvstore.aliyuncs.com'
+    client = R_kvstore20150101Client(config)
+
+    try:
+        runtime = util_models.RuntimeOptions()
+        describe_instances_request = r_kvstore_20150101_models.DescribeInstancesRequest(
+            page_size=100,
+            region_id=region
+        )
+        redisbaseinfo = client.describe_instances_with_options(describe_instances_request, runtime)
+        redisbase_list = redisbaseinfo.body.to_map()['Instances']["KVStoreInstance"]
+
+        redis_dict = {i['InstanceId']:{'name':i.get('InstanceName',f"未命名{i['InstanceId']}"),
+                                       'domain':i['ConnectionDomain'],
+                                       'ip':i['PrivateIp'],
+                                       'port':i['Port'],
+                                       'region':region,
+                                       'group':group_dict.get(i['ResourceGroupId'],'无'),
+                                       'status':i['InstanceStatus'],
+                                       'itype':i['ArchitectureType'],
+                                       'ver':i['EngineVersion'],
+                                       'mem':f"{i['Capacity']}MB",
+                                       'exp': '-' if i['EndTime'] == None else i['EndTime'].split('T')[0]
+                                       } for i in redisbase_list}
+
+        count = len(redis_dict)
+        off,on = sync_redis.w2consul('alicloud',account,region,redis_dict)
+        data = {'count':count,'update':now,'status':20000,'on':on,'off':off,'msg':f'redis同步成功!总数:{count},开机:{on},关机:{off}'}
+        consul_kv.put_kv(f'ConsulManager/record/jobs/alicloud/{account}/redis/{region}', data)
+        print('【JOB】===>', 'alicloud_redis', account,region, data, flush=True)
+    except TeaException as e:
+        emsg = e.message.split('. ',1)[0]
+        print("【code:】",e.code,"\n【message:】",emsg, flush=True)
+        data = consul_kv.get_value(f'ConsulManager/record/jobs/alicloud/{account}/redis/{region}')
+        if data == {}:
+            data = {'count':'无','update':f'失败{e.code}','status':50000,'msg':emsg}
+        else:
+            data['update'] = f'失败{e.code}'
+            data['msg'] = emsg
+        consul_kv.put_kv(f'ConsulManager/record/jobs/alicloud/{account}/redis/{region}', data)
+    except Exception as e:
+        data = {'count':'无','update':f'失败','status':50000,'msg':str(e)}
+        consul_kv.put_kv(f'ConsulManager/record/jobs/alicloud/{account}/redis/{region}', data)
+
 def rds(account,region):
     ak,sk = consul_kv.get_aksk('alicloud',account)
     now = datetime.datetime.now().strftime('%m.%d/%H:%M')
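The new redis() job mirrors the existing ecs/rds jobs: it pulls instance metadata from the r-kvstore API, registers it via sync_redis.w2consul, and records the job result under ConsulManager/record/jobs/alicloud/<account>/redis/<region>. A minimal sketch of how it could be exercised, assuming the patched file is importable as units.cloud.alicloud; the account name and region ids below are placeholders, not values from this commit:

    # Hypothetical driver for the new sync job; module path, account and regions are assumptions.
    from units.cloud import alicloud

    for region in ['cn-hangzhou', 'cn-beijing']:   # placeholder region ids
        # Each call writes its result record to
        # ConsulManager/record/jobs/alicloud/<account>/redis/<region>
        alicloud.redis('myaccount', region)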
@@ -25,7 +25,7 @@ def w2consul(vendor,account,region,redis_dict):
     for k,v in redis_dict.items():
         iid = k
         #对consul中关机的redis做标记。
-        if v['status'] in ['SHUTDOWN']:
+        if v['status'] in ['SHUTDOWN','Unavailable','Inactive','Released']:
             off = off + 1
             tags = ['OFF', v['itype'], v['ver'], region]
             stat = 'off'
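The broadened status list means instances reported as Unavailable, Inactive or Released are now tagged OFF in Consul instead of being treated as running. A standalone sketch of that classification, with fabricated instance data:

    # Illustration only; instance ids and statuses are made up.
    OFF_STATES = ['SHUTDOWN', 'Unavailable', 'Inactive', 'Released']

    redis_dict = {
        'r-abc123': {'status': 'Normal'},     # running instance -> counted as on
        'r-def456': {'status': 'Released'},   # released instance -> now counted as off
    }

    off = sum(1 for v in redis_dict.values() if v['status'] in OFF_STATES)
    on = len(redis_dict) - off
    print(off, on)   # 1 1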
@@ -15,7 +15,8 @@ def exporter(vendor,account,region):
     metric_name_dict = {"CpuUsage":["# HELP mysql_cpu_util CPU使用率","# TYPE mysql_cpu_util gauge"],
                         "MemoryUsage":["# HELP mysql_mem_util 内存使用率","# TYPE mysql_mem_util gauge"],
                         "DiskUsage":["# HELP mysql_disk_util 磁盘使用率","# TYPE mysql_disk_util gauge"],
-                        "IOPSUsage":["# HELP mysql_io_util 磁盘I/O使用率","# TYPE mysql_io_util gauge"]
+                        "IOPSUsage":["# HELP mysql_io_util 磁盘I/O使用率","# TYPE mysql_io_util gauge"],
+                        "ConnectionUsage":["# HELP mysql_conn_util 连接数使用率","# TYPE mysql_conn_util gauge"]
                         }
     for i in metric_name_dict.keys():
         request_rdsmonit.set_MetricName(i)
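The new ConnectionUsage entry follows the same pattern as the existing keys: its HELP/TYPE header pair plus one sample line per instance appended by the loop below it. Roughly, the output for the new metric looks like this; the instance id, value and timestamp are invented for illustration:

    # Sample exposition lines for the new connection-usage metric (values invented).
    help_line = "# HELP mysql_conn_util 连接数使用率"
    type_line = "# TYPE mysql_conn_util gauge"
    sample = 'mysql_conn_util{iid="rm-example"} 12.5 1700000000000'
    print('\n'.join([help_line, type_line, sample]))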
@@ -14,10 +14,10 @@ def exporter(vendor,account,region):
         .build()
     metric_name_dict = {"rds001_cpu_util":["# HELP mysql_cpu_util CPU使用率","# TYPE mysql_cpu_util gauge"],
                         "rds002_mem_util":["# HELP mysql_mem_util 内存使用率","# TYPE mysql_mem_util gauge"],
-                        "rds003_iops":["# HELP mysql_iops_count 每秒I/O请求数","# TYPE mysql_iops_count gauge"],
                         "rds039_disk_util":["# HELP mysql_disk_util 磁盘使用率","# TYPE mysql_disk_util gauge"],
                         "rds074_slow_queries":["# HELP mysql_slow_queries 每分钟慢SQL","# TYPE mysql_slow_queries gauge"],
-                        "rds081_vm_ioutils":["# HELP mysql_io_util 磁盘I/O使用率","# TYPE mysql_io_util gauge"]
+                        "rds081_vm_ioutils":["# HELP mysql_io_util 磁盘I/O使用率","# TYPE mysql_io_util gauge"],
+                        "rds072_conn_usage":["# HELP mysql_conn_util 连接数使用率","# TYPE mysql_conn_util gauge"]
                         }
     metric_body_list = []
     now = int(datetime.now().timestamp()*1000)
@@ -14,9 +14,9 @@ def exporter(vendor,account,region):
     req = models.GetMonitorDataRequest()
     metric_name_dict = {"CpuUseRate":["# HELP mysql_cpu_util CPU使用率","# TYPE mysql_cpu_util gauge"],
                         "MemoryUseRate":["# HELP mysql_mem_util 内存使用率","# TYPE mysql_mem_util gauge"],
-                        "IOPS":["# HELP mysql_iops_count 每秒I/O请求数","# TYPE mysql_iops_count gauge"],
                         "VolumeRate":["# HELP mysql_disk_util 磁盘使用率","# TYPE mysql_disk_util gauge"],
-                        "IopsUseRate":["# HELP mysql_io_util 磁盘I/O使用率","# TYPE mysql_io_util gauge"]
+                        "IopsUseRate":["# HELP mysql_io_util 磁盘I/O使用率","# TYPE mysql_io_util gauge"],
+                        "ConnectionUseRate":["# HELP mysql_conn_util 连接数使用率","# TYPE mysql_conn_util gauge"]
                         }
     rds_list = consul_kv.get_services_list_by_region(f'{vendor}_{account}_rds',region)
     rds_list = list(rds_list)
@@ -0,0 +1,32 @@
+from aliyunsdkcore.client import AcsClient
+from aliyunsdkcore.acs_exception.exceptions import ClientException
+from aliyunsdkcore.acs_exception.exceptions import ServerException
+from aliyunsdkcms.request.v20190101.DescribeMetricLastRequest import DescribeMetricLastRequest
+from datetime import datetime
+from units import consul_kv
+import json
+
+def exporter(vendor,account,region):
+    ak,sk = consul_kv.get_aksk(vendor,account)
+    client_redismonit = AcsClient(ak, sk, region)
+    request_redismonit = DescribeMetricLastRequest()
+    request_redismonit.set_accept_format('json')
+    request_redismonit.set_Namespace("acs_kvstore")
+    metric_name_dict = {"CpuUsage":["# HELP redis_cpu_util CPU使用率","# TYPE redis_cpu_util gauge"],
+                        "MemoryUsage":["# HELP redis_mem_util 内存使用率","# TYPE redis_mem_util gauge"],
+                        "ConnectionUsage":["# HELP redis_conn_util 连接数使用率","# TYPE redis_conn_util gauge"],
+                        "IntranetInRatio":["# HELP redis_netin_util 写入带宽使用率","# TYPE redis_netin_util gauge"],
+                        "IntranetOutRatio":["# HELP redis_netout_util 读取带宽使用率","# TYPE redis_netout_util gauge"]
+                        }
+    for i in metric_name_dict.keys():
+        request_redismonit.set_MetricName(i)
+        response_redismonit = json.loads(client_redismonit.do_action_with_exception(request_redismonit))
+        instance = json.loads(response_redismonit["Datapoints"])
+        prom_metric_name = metric_name_dict[i][0].split()[2]
+        for j in instance:
+            iid,max,ts = j["instanceId"],j["Maximum"],j["timestamp"]
+            metric_name_dict[i].append(f'{prom_metric_name}{{iid="{iid}"}} {float(max)} {ts}')
+    prom_metric_list = []
+    for x in metric_name_dict.values():
+        prom_metric_list = prom_metric_list + x
+    return prom_metric_list
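A minimal usage sketch for this new exporter module, assuming it is importable as units.prom.redis_ali (the import added later in this commit); the account and region values are placeholders:

    # Hypothetical call site; 'myaccount' and 'cn-hangzhou' are placeholders.
    from units.prom import redis_ali

    lines = redis_ali.exporter('alicloud', 'myaccount', 'cn-hangzhou')
    # Each metric key contributes its HELP/TYPE header plus one sample per instance, e.g.
    #   redis_cpu_util{iid="r-abc123"} 3.0 1700000000000
    print('\n'.join(lines))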
@@ -17,7 +17,7 @@ def exporter(vendor,account,region):
                         "keyspace_hits_perc":["# HELP redis_hits_util 缓存命中率","# TYPE redis_hits_util gauge"],
                         "total_connections_received":["# HELP redis_newconn_count 每分钟新建的连接数","# TYPE redis_newconn_count gauge"],
                         "rx_controlled":["# HELP redis_rx_controlled 每分钟被流控的次数","# TYPE redis_rx_controlled gauge"],
-                        "is_slow_log_exist":["# HELP redis_slow_log 慢日志情况","# TYPE redis_slow_log gauge"]
+                        "is_slow_log_exist":["# HELP redis_slow_log 慢日志情况","# TYPE redis_slow_log gauge"],
                         }
     metric_body_list = []
     now = int(datetime.now().timestamp()*1000)
@@ -28,7 +28,7 @@ def exporter(vendor,account,region):
         metric_body_list.append(MetricInfo(namespace="SYS.DCS",metric_name=i,dimensions=[MetricsDimension(name="dcs_instance_id",value=id)]))

     request = BatchListMetricDataRequest()
-    request.body = BatchListMetricDataRequestBody(to=now,_from=now-120000,filter="max",period="1",metrics=metric_body_list)
+    request.body = BatchListMetricDataRequestBody(to=now,_from=now-180000,filter="max",period="1",metrics=metric_body_list)
     #print(now-300000,now)
     response = client.batch_list_metric_data(request).to_dict()
     for i in response['metrics']:
@@ -0,0 +1,46 @@
+import json
+from tencentcloud.common import credential
+from tencentcloud.common.profile.client_profile import ClientProfile
+from tencentcloud.common.profile.http_profile import HttpProfile
+from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
+from tencentcloud.monitor.v20180724 import monitor_client, models
+from datetime import datetime,timedelta
+from units import consul_kv
+
+def exporter(vendor,account,region):
+    ak,sk = consul_kv.get_aksk(vendor,account)
+    cred = credential.Credential(ak,sk)
+    client = monitor_client.MonitorClient(cred, region)
+    req = models.GetMonitorDataRequest()
+    metric_name_dict = {"CpuMaxUtil":["# HELP redis_cpu_util 实例中节点最大CPU使用率","# TYPE redis_cpu_util gauge"],
+                        "MemMaxUtil":["# HELP redis_mem_util 实例中节点最大内存使用率","# TYPE redis_mem_util gauge"],
+                        "ConnectionsUtil":["# HELP redis_conn_util 连接使用率","# TYPE redis_conn_util gauge"],
+                        "CmdBigValue":["# HELP redis_big_count 每秒请求命令大小超过32KB的执行次数","# TYPE redis_big_count gauge"],
+                        "CmdSlow":["# HELP redis_slow_count 执行时延大于slowlog-log-slower-than配置的命令次数","# TYPE redis_slow_count gauge"],
+                        "InFlowLimit":["# HELP redis_inlimit_count 入流量触发限流的次数","# TYPE redis_inlimit_count gauge"],
+                        "OutFlowLimit":["# HELP redis_outlimit_count 出流量触发限流的次数","# TYPE redis_outlimit_count gauge"]
+                        }
+    redis_list = consul_kv.get_services_list_by_region(f'{vendor}_{account}_redis',region)
+    redis_list = list(redis_list)
+    redis_list_10 = [redis_list[i:i + 10] for i in range(0, len(redis_list), 10)]
+    try:
+        for i in metric_name_dict.keys():
+            for rediss in redis_list_10:
+                starttime = (datetime.now() + timedelta(minutes=-1)).strftime('%Y-%m-%dT%H:%M:%S+08:00')
+                ins_list = [{"Dimensions":[{"Name":"InstanceId","Value":x}]} for x in rediss]
+                params = {"Namespace":"QCE/REDIS_MEM","MetricName":i,"Period":60,"StartTime":starttime,"Instances":ins_list}
+                req.from_json_string(json.dumps(params))
+                resp = client.GetMonitorData(req)
+                metric_list = resp.DataPoints
+                for metrics in metric_list:
+                    iid = metrics.Dimensions[0].Value
+                    value = metrics.Values[-1]
+                    ts = metrics.Timestamps[-1]*1000
+                    prom_metric_name = metric_name_dict[i][0].split()[2]
+                    metric_name_dict[i].append(f'{prom_metric_name}{{iid="{iid}"}} {float(value)} {ts}')
+        prom_metric_list = []
+        for x in metric_name_dict.values():
+            prom_metric_list = prom_metric_list + x
+        return prom_metric_list
+    except TencentCloudSDKException as err:
+        print(err)
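This Tencent exporter queries at most 10 instances per GetMonitorData call. A standalone sketch of that chunking step, with invented instance ids:

    # Illustration of the 10-instance batching used above; ids are invented.
    redis_list = [f'crs-{n:06d}' for n in range(23)]
    redis_list_10 = [redis_list[i:i + 10] for i in range(0, len(redis_list), 10)]
    for batch in redis_list_10:
        ins_list = [{"Dimensions": [{"Name": "InstanceId", "Value": x}]} for x in batch]
        print(len(ins_list))   # 10, 10, 3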
@@ -2,7 +2,7 @@ from flask import Blueprint,Response
 from flask_restful import reqparse, Resource, Api
 from config import vendors,regions
 from units import token_auth,consul_kv
-from units.prom import mysql_huawei,mysql_ali,mysql_tencent,redis_huawei
+from units.prom import mysql_huawei,mysql_ali,mysql_tencent,redis_huawei,redis_ali
 import json
 blueprint = Blueprint('cloud_metrics',__name__)
 api = Api(blueprint)
@@ -21,8 +21,8 @@ class RedisExporter(Resource):
     def get(self,vendor,account,region):
         if vendor == 'huaweicloud':
             prom_metric_list = redis_huawei.exporter(vendor,account,region)
-        #elif vendor == 'alicloud':
-            #prom_metric_list = mysql_ali.exporter(vendor,account,region)
+        elif vendor == 'alicloud':
+            prom_metric_list = redis_ali.exporter(vendor,account,region)
         #elif vendor == 'tencent_cloud':
             #prom_metric_list = mysql_tencent.exporter(vendor,account,region)
         return Response('\n'.join(prom_metric_list).encode('utf-8'),mimetype="text/plain")
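With alicloud now wired to redis_ali, the RedisExporter endpoint serves Prometheus text for both supported vendors. A hedged scrape example; the URL path, port and auth header are assumptions, since the route registration and token handling are outside this hunk:

    # Hypothetical scrape of the redis metrics endpoint; host, path and auth
    # header are assumptions -- check how api.add_resource() registers RedisExporter.
    import requests

    url = 'http://127.0.0.1:1026/api/cloud_metrics/redis/alicloud/myaccount/cn-hangzhou'
    resp = requests.get(url, headers={'Authorization': 'your-token'})
    print(resp.text)   # text/plain Prometheus exposition format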
@@ -32,7 +32,7 @@ export default {
       jobrds_list: [],
       exporter: '',
       cm_exporter: '',
-      configs: '该功能用于生成Prometheus的两个JOB配置,生成后请复制到Prometheus配置中:\n\n1. 选择需要同步的账号,Prometheus即可自动发现该账号下的所有DRS实例。\n\n2. 由于Mysqld_Exporter无法监控到云数据库的CPU、内存、磁盘的使用情况,所以ConsulManager开发了Exporter功能,配置到Prometheus即可直接从云厂商采集到这些指标!\n 选择需要采集指标的RDS账号区域,即可生成Prometheus的JOB配置。'
+      configs: '该功能用于生成Prometheus的两个JOB配置,生成后请复制到Prometheus配置中:\n\n1. 选择需要同步的账号,Prometheus即可自动发现该账号下的所有MySQL实例。\n\n2. 由于Mysqld_Exporter无法监控到云数据库的CPU、内存、磁盘的使用情况,所以ConsulManager开发了Exporter功能,配置到Prometheus即可直接从云厂商采集到这些指标!\n 选择需要采集指标的RDS账号区域,即可生成Prometheus的JOB配置。'
     }
   },
   created() {
@@ -59,7 +59,7 @@
         </el-table-column>
         <el-table-column prop="ver" label="版本" sortable align="center" width="80" />
         <el-table-column prop="mem" label="内存" sortable align="center" width="90" />
-        <el-table-column prop="exp" label="到期日" sortable align="center" width="90" />
+        <el-table-column prop="exp" label="到期日" sortable align="center" width="95" />
         <el-table-column prop="itype" label="类型" sortable align="center" width="120" show-overflow-tooltip />
         <el-table-column prop="iid" label="实例ID" sortable align="center" width="150" show-overflow-tooltip />
         <el-table-column label="操作" align="center" width="120" class-name="small-padding fixed-width">
@@ -32,7 +32,7 @@ export default {
       jobredis_list: [],
       exporter: '',
       cm_exporter: '',
-      configs: '该功能用于生成Prometheus的两个JOB配置,生成后请复制到Prometheus配置中:\n\n1. 选择需要同步的账号,Prometheus即可自动发现该账号下的所有DRS实例。\n\n2. 由于Redis_Exporter无法监控到云数据库的CPU、内存、磁盘的使用情况,所以ConsulManager开发了Exporter功能,配置到Prometheus即可直接从云厂商采集到这些指标!\n 选择需要采集指标的REDIS账号区域,即可生成Prometheus的JOB配置。'
+      configs: '该功能用于生成Prometheus的两个JOB配置,生成后请复制到Prometheus配置中:\n\n1. 选择需要同步的账号,Prometheus即可自动发现该账号下的所有REDIS实例。\n\n2. 由于Redis_Exporter无法监控到云数据库的CPU、内存、磁盘的使用情况,所以ConsulManager开发了Exporter功能,配置到Prometheus即可直接从云厂商采集到这些指标!\n 选择需要采集指标的REDIS账号区域,即可生成Prometheus的JOB配置。'
     }
   },
   created() {