# -*- coding:utf-8 -*-
"""
Purpose: query logstash logs through Kibana.

Status: work in progress...
"""

import json
from datetime import datetime, timedelta, timezone

import requests

class LogstashLogKibana:
    """Query logstash application logs through Kibana's internal bsearch API.

    Logs in with basic auth on construction; the requests session keeps the
    resulting auth cookie for subsequent queries.
    (Original note: work in progress.)
    """

    def __init__(self, k_user="wuyonggang", k_pwd="Mima@123"):
        """
        :param k_user: Kibana username
        :param k_pwd: Kibana password

        NOTE(review): hardcoded default credentials are a security smell —
        they should come from configuration or the environment.
        """
        self.kibana_host = "https://logstashlog-kibana.qc.huohua.cn/internal/bsearch"
        self.kibana_user = k_user
        self.kibana_pwd = k_pwd
        # kbn-version must match the Kibana server version or it rejects the call.
        self.r_header = {'kbn-version': '7.14.2',
                         'Content-Type': 'application/json; charset=gbk',
                         'sec-ch-ua-mobile': r'?0'}
        # One session so the login cookie is reused by every later request.
        self.request = requests.session()
        self._login_kibana()

    def _login_kibana(self):
        """Authenticate against Kibana; the session stores the auth cookie.

        :return: the login response.
        :raises requests.HTTPError: if the login request is rejected.
        """
        url = "https://logstashlog-kibana.qc.huohua.cn/internal/security/login"
        payload = json.dumps({"providerType": "basic",
                              "providerName": "basic",
                              "currentURL": "https://logstashlog-kibana.qc.huohua.cn/login?msg=LOGGED_OUT",
                              "params": {"username": self.kibana_user, "password": self.kibana_pwd}
                              })
        resp = self.request.post(url, headers=self.r_header, data=payload)
        # Fail fast on bad credentials; the original only printed the response
        # object and continued with an unauthenticated session.
        resp.raise_for_status()
        return resp

    def _format_message(self, message):
        """Format a log message.

        :param message: raw message
        :return: formatted message

        TODO: not implemented yet (stub kept from the original source).
        """

    def _get_time_tamp(self, minute=10):
        """Build the query time window ending now (UTC).

        :param minute: window length in minutes.
        :return: (gte, lte) timestamp strings in Elasticsearch
                 strict_date_optional_time format with millisecond precision,
                 e.g. ``2024-01-01T00:00:00.000Z``.
        """
        # Timezone-aware "now"; datetime.utcnow() is deprecated since 3.12.
        current_time = datetime.now(timezone.utc)
        window_start = current_time - timedelta(minutes=minute)
        # %f gives microseconds; drop the last 3 digits for milliseconds.
        gte_timestamp = window_start.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
        lte_timestamp = current_time.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
        return gte_timestamp, lte_timestamp

    def _build_query_condition(self, querys):
        """Translate *querys* into a single Elasticsearch query clause.

        :param querys: a dict -> one ``match_phrase`` should-clause;
                       a 1-element list -> one ``multi_match`` phrase;
                       a longer list -> a bool filter requiring every phrase.
        :return: the query clause dict.
        """
        if isinstance(querys, dict):
            return {"bool": {"should": [{"match_phrase": querys}],
                             "minimum_should_match": 1}}
        if len(querys) == 1:
            return {"multi_match": {"type": "phrase",
                                    "query": querys[0],
                                    "lenient": True}}
        return {"bool": {"filter": [{"multi_match": {"type": "phrase",
                                                     "query": query,
                                                     "lenient": True}}
                                    for query in querys]}}

    def lk_query_kibana_log(self, minutes=15, app_name=None, querys=None):
        """Search the logstash index for the given phrases in a recent window.

        :param minutes: how many minutes back from now to search.
        :param app_name: optional APP_NAME to restrict the search to.
        :param querys: list of phrases or a match_phrase dict; defaults to
                       ``['java.lang.NullPointerException']``.
        :return: parsed JSON response from Kibana.
        :raises ValueError: if querys is neither a list nor a dict, or is an
                            empty list.
        """
        if querys is None:
            querys = ['java.lang.NullPointerException']
        if not isinstance(querys, (list, dict)):
            raise ValueError('querys:{},必须为列表或者字典'.format(querys))
        if isinstance(querys, list) and not querys:
            # The original crashed with NameError (query_cons unbound) here.
            raise ValueError('querys:{},必须为列表或者字典'.format(querys))

        gte_timestamp, lte_timestamp = self._get_time_tamp(minute=minutes)
        query_cons = self._build_query_condition(querys)

        filters = [query_cons,
                   {"range": {"@timestamp": {"gte": gte_timestamp,
                                             "lte": lte_timestamp,
                                             "format": "strict_date_optional_time"}}}]
        # Only constrain APP_NAME when one was supplied; the original always
        # sent {"APP_NAME": None}, which is not a valid match_phrase value.
        if app_name is not None:
            filters.append({"match_phrase": {"APP_NAME": app_name}})

        body = {
            "size": 10000,
            "sort": [{"@timestamp": {"order": "desc",
                                     "unmapped_type": "boolean"}}],
            "version": True,
            # "end_data" is kept from the original request
            # (presumably a typo of "end_date" — confirm against the index).
            "fields": [
                {"field": "*", "include_unmapped": "true"},
                {"field": "@timestamp", "format": "strict_date_optional_time"},
                {"field": "end_data", "format": "strict_date_optional_time"},
                {"field": "end_date", "format": "strict_date_optional_time"},
                {"field": "start_date", "format": "strict_date_optional_time"},
            ],
            "aggs": {"2": {"date_histogram": {"field": "@timestamp",
                                              "fixed_interval": "30m",
                                              "time_zone": "Asia/Shanghai",
                                              "min_doc_count": 1}}},
            "script_fields": {},
            "stored_fields": ["*"],
            "runtime_mappings": {},
            "_source": False,
            "query": {"bool": {"must": [],
                               "filter": filters,
                               "should": [],
                               "must_not": []}},
            "highlight": {"pre_tags": ["@kibana-highlighted-field@"],
                          "post_tags": ["@/kibana-highlighted-field@"],
                          "fields": {"*": {}},
                          "fragment_size": 2147483647},
        }
        payload = json.dumps({
            "batch": [{
                "request": {"params": {"index": "logstash-qc-logstashlog*",
                                       "body": body,
                                       "track_total_hits": True,
                                       "preference": 1708481407352}},
                "options": {
                    # "sessionId": "473ee7d3-be00-411e-a925-71e2f669a230",
                    "isRestore": False,
                    "strategy": "ese",
                    "isStored": False},
            }]
        })
        # self.kibana_host is the same bsearch endpoint the original inlined.
        response = self.request.post(self.kibana_host, headers=self.r_header,
                                     data=payload)
        json_data = json.loads(response.text)
        print(json_data)
        # Return the parsed result instead of only printing it.
        return json_data
if __name__ == '__main__':
    # Ad-hoc smoke run: scan the scheduler's logs from the last 15 minutes
    # for NullPointerException occurrences.
    client = LogstashLogKibana()
    client.lk_query_kibana_log(
        minutes=15,
        app_name="peppa-sparkle-scheduler",
        querys=['java.lang.NullPointerException'],
    )