cpu_median = cpu_median_points[0][‘median’]
##print(cpu_median)
##print(‘开始查询mem使用率的平均数’)
mem_mean_list = self.client.query(
“SELECt mean(used) /mean(total) from mem where time>=%s and time<=%s and host != ‘qqlog_XXX_XXX’;” % (
start_time, end_time))
print(mem_mean_list)
mem_mean_list的格式为ResultSet({’(‘mem’, None)’: [{‘time’: ‘2018-06-21T16:00:00Z’, ‘mean_mean’: 0.729324184536873}]})mem_mean_points = list(mem_mean_list.get_points(measurement=‘mem’))
mem_mean = mem_mean_points[0][‘mean_mean’]
##print(mem_mean)
##print(‘开始查询mem使用率的中位数’)
mem_median_list = self.client.query(
“SELECt median(used) /median(total) from mem where time>=%s and time<=%s AND host != ‘qqlog_XXX_XXX’ ;” % (
start_time, end_time))
##print(mem_median_list)
###mem_median_list的格式为ResultSet({’(‘mem’, None)’: [{‘time’: ‘2018-06-21T16:00:00Z’, ‘median_median’: 0.8698493636354012}]})
mem_median_points = list(mem_median_list.get_points(measurement=‘mem’))
mem_median = mem_median_points[0][‘median_median’]
##print(‘开始查询disk使用率的平均数’)
disk_mean_list = self.client.query(
“SELECt mean(used) /mean(total) from disk where time>=%s and time<=%s AND host != ‘qqlog_XXX_XXX’;” % (
start_time, end_time))
##print (disk_mean_list)
###disk_mean_list的格式为ResultSet({'('disk', None)': [{'time': '2018-06-21T16:00:00Z', 'mean_mean': 0.31204798557786284}]})
disk_mean_points = list(disk_mean_list.get_points(measurement=‘disk’))
disk_mean = disk_mean_points[0][‘mean_mean’]
##print(disk_mean)
##print(‘开始查询disk使用率的中位数’)
disk_median_list = self.client.query(
“SELECt median(used) /median(total) from disk where time>=%s and time<=%s and host != ‘qqlog_XXX_XXX’;” % (
start_time, end_time))
##print (disk_median_list)
###disk_median_list的格式ResultSet({’(‘disk’, None)’: [{‘time’: ‘2018-06-21T16:00:00Z’, ‘median_median’: 0.08009824336938143}]})
disk_median_points = list(disk_median_list.get_points(measurement=‘disk’))
##print(disk_median_points)
disk_median = disk_median_points[0][‘median_median’]
##print(disk_median)
将计算统计的结果放到MySQl中,以便汇总发送Reportsql_insert = "insert into weekly_dbperformance(cpu_mean,cpu_median,mem_mean,mem_median,disk_mean,disk_median) "
“values(’%s’,’%s’,’%s’,’%s’,’%s’,’%s’)” %
(cpu_mean,cpu_median,mem_mean,mem_median,disk_mean,disk_median)
cursor.execute(sql_insert)
mysqldb.commit()
def change_time(self, params):
    """Convert a timestamp string to an 'Asia/Shanghai' local-time string.

    :param params: timestamp string parseable by ``dateutil.parser.parse``
                   (e.g. influxdb's '2018-06-21T16:00:00Z')
    :return: local-time string formatted as '%Y-%m-%d %H:%M:%S'
    """
    # Parse the input and shift it into the Shanghai timezone (UTC+8).
    item = dateutil.parser.parse(params).astimezone(pytz.timezone('Asia/Shanghai'))
    # Drop the UTC-offset suffix (e.g. '+08:00') from the rendered datetime.
    # NOTE(review): assumes the value has no fractional seconds — influxdb
    # timestamps here do not carry them; strptime below would fail otherwise.
    result = str(item).split("+")[0]
    # Round-trip through strptime/strftime; this also validates the format.
    response = time.strptime(result, '%Y-%m-%d %H:%M:%S')
    param = time.strftime('%Y-%m-%d %H:%M:%S', response)
    return param
# Connect to influxdb: INFLUXDB_IP is the influxdb host, INFLUXDB_PORT its port.
db = DBApi(ip='XXX.110.119.XXX', port='???')
# Query time range: the last 7 days, as date-only strings.
# NOTE: .strftime('%Y-%m-%d') is required — passing a datetime object raises
# "TypeError: strptime() argument 0 must be str".
e_time = datetime.datetime.now().strftime('%Y-%m-%d')
s_time = (datetime.datetime.now() + datetime.timedelta(-7)).strftime('%Y-%m-%d')
print('打印查询范围----时间参数如下:')
print(e_time)
print(s_time)
# Collect CPU/mem/disk statistics for the window and store them in MySQL.
db.get_use_dbperformance(s_time, e_time)
注意:此份代码的运行环境是Python 3.6.8;此外还要注意下influxdb的query返回值的处理;可执行文件可以通过crontab设置定时任务,周期性抓取数据。
3.2 统计DB实例的慢查询 可执行文件为count_dbslow.py从elasticsearch中读取慢查询的数据,主要是统计符合条件的个数。
需要说明的是,某产品线下的数据库慢查询存放在命名规则一致的Index中。本例中的mysql-slow-qqorders-*,是查询以mysql-slow-qqorders-开头的所有Index中慢查询的个数,qqorders是具体的产品线代码,*是日期的模糊匹配。
#coding:utf8
import os
import time
from datetime import date
导入模块 timedelta ,否则date.today()+ timedelta(days = -2) 报错: AttributeError: ‘datetime.date’ object has no attribute ‘timedelta’from datetime import timedelta
from os import walk
###导入模块的from datetime import datetime改成import datetime;否则在day = datetime.datetime.now()报错:AttributeError: type object ‘datetime.datetime’ has no attribute ‘datetime’
##from datetime import datetime
import datetime
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk
import db_monitor_conn
# Obtain the shared MySQL connection and a cursor for the inserts below.
mysqldb = db_monitor_conn.db
cursor = mysqldb.cursor()
# Optional: purge previously collected rows before a fresh collection run.
##sql_delete = "delete from weekly_dbslowqty "
##cursor.execute(sql_delete)
##mysqldb.commit()
class ElasticObj:
    """Count slow-query documents in Elasticsearch for one index pattern and
    persist the count into MySQL for the weekly report."""

    def __init__(self, index_name, index_type, ip="ES实例所在的ServerIP"):
        """
        :param index_name: index (pattern) to query, e.g. 'mysql-slow-qqorders-*'
        :param index_type: document type, typically '_doc'
        :param ip: host of the Elasticsearch instance
        """
        self.index_name = index_name
        self.index_type = index_type
        # Without authentication:
        # self.es = Elasticsearch([ip])
        # With HTTP basic auth (credentials/port are placeholders):
        self.es = Elasticsearch([ip], http_auth=('ES用*户*名', 'ES用*户*密*码'), port=ES端口号)

    def Get_SlowQty_By_Indexname(self, dstart, dend):
        """Count slow-query docs with @timestamp in [dstart, dend] and insert
        the count into MySQL table weekly_dbslowqty.

        :param dstart: datetime, inclusive lower bound
        :param dend: datetime, inclusive upper bound
        """
        doc = {
            "query": {
                "bool": {
                    "must": [
                        # Only documents that actually carry a query_time field.
                        {"exists": {"field": "query_time"}},
                        {"range": {
                            "@timestamp": {
                                "gte": dstart.strftime('%Y-%m-%d %H:%M:%S'),
                                "lte": dend.strftime('%Y-%m-%d %H:%M:%S'),
                                # 'ss' = seconds; the original 'SS' means
                                # fraction-of-second in ES date patterns and
                                # does not match the strftime output above.
                                "format": "yyyy-MM-dd HH:mm:ss",
                                "time_zone": "+08:00"
                            }
                        }}
                    ],
                    "must_not": [
                        # Exclude servers that must not be counted, e.g. XXX.XXX.XXX.XXX.
                        {"term": {"fields.db_host": "XXX.110.119.XXX"}}
                    ]
                }
            }
        }
        _slowqty = self.es.count(index=self.index_name, doc_type=self.index_type, body=doc)
        print(_slowqty)
        # count() returns a dict like:
        # {'count': 2374, '_shards': {'total': 16, 'successful': 16, 'skipped': 0, 'failed': 0}}
        slowqty = _slowqty['count']
        print(slowqty)
        # Persist the result to MySQL so the weekly report can read it.
        # NOTE(review): string-built SQL — fine for these internal values, but a
        # parameterized query (cursor.execute(sql, params)) would be safer.
        sql_insert = ("insert into weekly_dbslowqty(qindex_name,qstartdate,qenddate,slowqty) "
                      "values('%s','%s','%s','%s')" %
                      (self.index_name, dstart, dend, slowqty))
        cursor.execute(sql_insert)
        mysqldb.commit()
obj = ElasticObj("mysql-slow-qqorders-*", "_doc", ip="ES 所在机器的 ServerIP")
# Time window: from the day before yesterday to yesterday (whole days).
dstart = (date.today() + timedelta(days=-2)).strftime('%Y-%m-%d')
dend = (date.today() + timedelta(days=-1)).strftime('%Y-%m-%d')
# Convert back to datetime objects. The .strftime above guarantees a str input;
# otherwise strptime raises "TypeError: strptime() argument 1 must be str,
# not datetime.date".
dstart = datetime.datetime.strptime(dstart, '%Y-%m-%d')
dend = datetime.datetime.strptime(dend, '%Y-%m-%d')
print(dstart)
print(dend)
obj.Get_SlowQty_By_Indexname(dstart, dend)
注意:此份代码的运行环境也是Python 3.6.8
3.3 发送Server资源性能周报可执行文件为dbperformance_report_weekly.py
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Python 2.7 script: reload/setdefaultencoding force a UTF-8 default codec.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")

import db_monitor_conn
import os
import time
import smtp_config_dbperformance
from email.mime.text import MIMEText
from email.header import Header
def send_mail(mail_msg):
    """Wrap the accumulated HTML fragments into a MIME message and send it.

    :param mail_msg: HTML body accumulated by the report loop
    """
    # NOTE(review): the original triple-quoted template likely contained HTML
    # header markup that was lost when the article was extracted.
    mail_body = """
    """
    mail_body = mail_body + mail_msg + ""
    message = MIMEText(mail_body, 'html', 'utf-8')
    subject = 'DB服务器性能周报[资源性能]'
    message['Subject'] = Header(subject, 'utf-8')
    smtp_config_dbperformance.send_mail(message)
    return
# Accumulator for the HTML mail body.
mail_msg = ""
# Shared MySQL connection and cursor.
db = db_monitor_conn.db
print(db)
cursor = db.cursor()
# Latest collected server-performance row; mem/disk ratios scaled to percent.
# NOTE(review): "order by datetime_created limit 1" returns the OLDEST row —
# if the table keeps history, "order by datetime_created desc" is probably
# intended; confirm against the collection job's delete/retention policy.
sql_dbper_report = (
    " select ROUND(cpu_mean,2) as cpu_mean,ROUND(cpu_median,2) as cpu_median ,ROUND(mem_mean *100 ,2)as mem_mean , "
    " ROUND(mem_median *100,2) as mem_median ,ROUND(disk_mean * 100,2) as disk_mean,ROUND(disk_median *100,2) as disk_median,date_format(datetime_created, '%Y-%m-%d') as datetime_created "
    " FROM weekly_dbperformance "
    " where 1=1"
    " order by datetime_created limit 1 "
)
try:
    cursor.execute(sql_dbper_report)
    results = cursor.fetchall()
    for row in results:
        cpu_mean = str(row[0])
        cpu_median = str(row[1])
        mem_mean = str(row[2])
        mem_median = str(row[3])
        disk_mean = str(row[4])
        disk_median = str(row[5])
        rdatetime = str(row[6])
        # Keep the number of placeholders equal to the number of parameters.
        # NOTE(review): the HTML table markup of this template was lost in
        # extraction; only the placeholder skeleton survives.
        mail_msg_single = """%s%s%s %s%s%s %s """ % \
            (cpu_mean, cpu_median, mem_mean, mem_median, disk_mean, disk_median, rdatetime)
        mail_msg = mail_msg + mail_msg_single
    send_mail(mail_msg)
except Exception as e:
    print(str(Exception))
    print(str(e))
cursor.close()
db.close()
注意:此份代码的运行环境是Python 2.7.5
3.4 发送DB慢查询周报 可执行文件为dbslowlog_report_weekly.py
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Python 2.7 script: reload/setdefaultencoding force a UTF-8 default codec.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")

import db_monitor_conn
import os
import time
import smtp_config_dbperformance
from email.mime.text import MIMEText
from email.header import Header
def send_mail(mail_msg):
    """Wrap the accumulated HTML fragments into a MIME message and send it.

    :param mail_msg: HTML body accumulated by the report loop
    """
    # NOTE(review): the original triple-quoted template likely contained HTML
    # header markup that was lost when the article was extracted.
    mail_body = """
    """
    mail_body = mail_body + mail_msg + ""
    message = MIMEText(mail_body, 'html', 'utf-8')
    subject = 'DB服务器性能周报[DB慢查询]'
    message['Subject'] = Header(subject, 'utf-8')
    smtp_config_dbperformance.send_mail(message)
    return
# Accumulator for the HTML mail body.
mail_msg = ""
# Shared MySQL connection and cursor.
db = db_monitor_conn.db
print(db)
cursor = db.cursor()
# Daily slow-query counts of the last week for the qqorders product line.
# NOTE(review): SELECT DISTINCT combined with ORDER BY on a column that is not
# in the select list (datetime_created) is rejected by MySQL in ONLY_FULL_GROUP_BY
# / strict modes — consider ordering by qstartdate instead.
sql_dbslow_report = (
    " select distinct qstartdate,qenddate,slowqty "
    " FROM weekly_dbslowqty "
    " where qindex_name ='mysql-slow-qqorders-*' and qstartdate >= date_sub(curdate(),interval 8 day) and qstartdate < date_sub(curdate(),interval 1 day) "
    " order by datetime_created asc "
)
try:
    cursor.execute(sql_dbslow_report)
    results = cursor.fetchall()
    for row in results:
        qstartdate = str(row[0])
        qenddate = str(row[1])
        slowqty = str(row[2])
        # Keep the number of placeholders equal to the number of parameters.
        # NOTE(review): HTML table markup of this template was lost in extraction.
        mail_msg_single = """%s%s %s """ % \
            (qstartdate, qenddate, slowqty)
        mail_msg = mail_msg + mail_msg_single
    send_mail(mail_msg)
except Exception as e:
    print(str(Exception))
    print(str(e))
cursor.close()
db.close()

注意:此份代码的运行环境也是Python 2.7.5
3.5 其他模块mysql的连接模块:db_monitor_conn
相应的代码可在《通过Python将监控数据由influxdb写入到MySQL》一文中查看,参阅db_conn.py的编写,在此不再赘述。
短信发送的模块:smtp_config_dbperformance
请参阅前面的分享《MySQL数据归档小工具推荐及优化–mysql_archiver》,github地址:https://github.com/dbarun/mysql_archiver 下载的代码,有发送邮件的模块smtp_config.py,在此不再赘述。
四 实现
4.1 DBServer资源报告示样



