增加数据库造数的接口

This commit is contained in:
qiaoxinjiu
2026-04-13 16:34:14 +08:00
commit 9183b8b0ff
29 changed files with 1263 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@@ -0,0 +1,3 @@
/logs/
/venv/
.idea/

4
README.md Normal file
View File

@@ -0,0 +1,4 @@
### IT接口管理
* git clone
* pip3 install -r requirements.txt
* gunicorn --config=gunicorn.conf.py manage:app

18
app/__init__.py Normal file
View File

@@ -0,0 +1,18 @@
from flask import Flask
from app.api.views import api
from flask_docs import ApiDoc
from logger import logger
def create_app():
    """Application factory: build the Flask app, mount the API blueprint
    under /it/api and attach Flask-Docs generated API documentation.

    Returns:
        The configured Flask application instance.
    """
    app = Flask(__name__)
    app.register_blueprint(api, url_prefix='/it/api')
    # BUGFIX(review): API_DOC_MEMBER was previously assigned *after*
    # ApiDoc() was constructed; Flask-Docs' documented order is to set the
    # config before initializing the extension so the member list is seen.
    app.config["API_DOC_MEMBER"] = ["api", "platform"]
    ApiDoc(
        app,
        title="Effekt Interface App",
        version="1.0.0",
        description="Effekt Interface app API",
    )
    logger.info("app start-------")
    return app

0
app/api/__init__.py Normal file
View File

View File

View File

@@ -0,0 +1,184 @@
# encoding: UTF-8
from datetime import date, datetime
from decimal import Decimal
from common.sqlSession import SqlSession
from common.getUserInfo import UserInfo
from common.cronRequest import CronRequest
from ..service.updateSqlProjectService import UpdateSqlProjectService
from ..model.updateSqlProjectModel import UpdateSqlProject
from const import EXECUTE_DB_CONFIG, QE_DOMAIN
from logger import logger
"""
创建和更新场景
"""
class UpdateSqlProjectController(object):
    """Controller for stored data-seeding SQL scenarios.

    Wraps create/update, paged listing, detail lookup, soft delete and
    execution of a stored SQL statement against a per-project database.
    Most methods return ``(result, err_msg)`` tuples where ``err_msg`` is
    '' on success.
    """

    def __init__(self, req_json):
        # req_json is dict-like: a parsed JSON body or Flask request.args.
        self.session = SqlSession()
        self.run_env = req_json.get('runEnv')
        self.sql = req_json.get('sql')
        self.sql_id = req_json.get('sqlId')
        self.project = req_json.get('project')
        self.page_num = req_json.get('pageNo')
        self.page_size = req_json.get('pageSize')
        self.remark = req_json.get('remark')
        self.run_group = req_json.get('runGroup')
        self.creator = req_json.get('creator')
        self.qe_domain = QE_DOMAIN

    def create_sql_project(self):
        """Create a scenario record, or update an existing one when sqlId is given.

        Returns:
            (record_id, '') on success, (0, error_message) on failure.
        """
        # Strip whitespace and surrounding double quotes from string inputs.
        project = self.project.strip().strip('"') if isinstance(self.project, str) else self.project
        run_env = self.run_env.strip().strip('"') if isinstance(self.run_env, str) else self.run_env
        sql = self.sql.strip().strip('"') if isinstance(self.sql, str) else self.sql
        if not project or not run_env or not sql:
            return 0, 'project、runEnv、sql 为必传参数'
        sql_id = self.sql_id
        if isinstance(sql_id, str):
            sql_id = sql_id.strip().strip('"')
        remark = self.remark.strip() if isinstance(self.remark, str) else self.remark
        remark = remark if remark not in ('', None) else None
        run_group = self.run_group.strip() if isinstance(self.run_group, str) else self.run_group
        run_group = run_group if run_group not in ('', None) else ''
        creator = self.creator.strip() if isinstance(self.creator, str) else self.creator
        # Default owner when no creator supplied.
        creator = creator if creator not in ('', None) else 'admin'
        save_info = {
            'project': project,
            'run_env': run_env,
            'sql': sql,
            'remark': remark,
            'run_group': run_group,
            'creator': creator,
            'is_delete': 0
        }
        # A non-empty sqlId means "update in place" rather than "create".
        if sql_id not in (None, ''):
            update_res, err_msg = UpdateSqlProjectService.update_sql_project(
                self.session, sql_id, save_info
            )
            return update_res, err_msg
        create_id, err_msg = UpdateSqlProjectService.create_sql_project(self.session, save_info)
        return create_id, err_msg

    @staticmethod
    def _format_value(value):
        # Normalize DB values into JSON-serializable primitives.
        if isinstance(value, datetime):
            return value.strftime('%Y-%m-%d %H:%M:%S')
        if isinstance(value, date):
            return value.strftime('%Y-%m-%d')
        if isinstance(value, Decimal):
            return float(value)
        return value

    @classmethod
    def _serialize_item(cls, item):
        # Convert an ORM row into a plain dict with JSON-safe values.
        item_dict = item.to_dict()
        for key, value in item_dict.items():
            item_dict[key] = cls._format_value(value)
        return item_dict

    def query_smart_manage_sql_data(self):
        """Paged query of the stored SQL records, filtered by
        project / creator / run_env / run_group.

        Returns:
            {'list': [serialized records], 'total': matching row count}.
        """
        page_num = self.page_num or 1
        page_size = self.page_size or 20
        project = self.project
        creator = self.creator
        run_group = self.run_group
        run_env = self.run_env
        if isinstance(project, str):
            project = project.strip().strip('"')
        if isinstance(creator, str):
            creator = creator.strip().strip('"')
        if isinstance(run_group, str):
            # Also normalize full-width (ideographic) spaces before stripping.
            run_group = run_group.replace('\u3000', ' ').strip().strip('"')
        # NOTE(review): run_env is not stripped here, unlike the other fields.
        filter_list = list()
        if project:
            filter_list.append(UpdateSqlProject.project == project)
        if creator:
            filter_list.append(UpdateSqlProject.creator == creator)
        if run_env:
            filter_list.append(UpdateSqlProject.run_env == run_env)
        if run_group:
            filter_list.append(UpdateSqlProject.run_group.isnot(None))
            filter_list.append(UpdateSqlProject.run_group != '')
            filter_list.append(UpdateSqlProject.run_group == run_group)
        test_info, count_num = UpdateSqlProjectService.get_sql_list_by_filters(
            session=self.session,
            filter_list=filter_list,
            page_num=page_num,
            page_size=page_size
        )
        result_list = [self._serialize_item(item) for item in test_info]
        return {'list': result_list, 'total': count_num}

    def get_sql_project_detail(self):
        """Fetch one record by sqlId. Returns (detail_dict, '') or ({}, err)."""
        sql_id = self.sql_id
        if isinstance(sql_id, str):
            sql_id = sql_id.strip().strip('"')
        if not sql_id:
            return {}, 'sqlId 为必传参数'
        sql_project = UpdateSqlProjectService.get_sql_project_by_id(self.session, sql_id)
        if not sql_project:
            return {}, '未查询到对应记录!'
        detail = self._serialize_item(sql_project)
        # Internal soft-delete flag is not exposed to clients.
        detail.pop('is_delete', None)
        return detail, ''

    def delete_sql_project(self):
        """Soft-delete one record by sqlId. Returns (sql_id, '') or (0, err)."""
        sql_id = self.sql_id
        if isinstance(sql_id, str):
            sql_id = sql_id.strip().strip('"')
        if not sql_id:
            return 0, 'sqlId 为必传参数'
        return UpdateSqlProjectService.delete_sql_project_by_id(self.session, sql_id)

    def execute_sql_project(self):
        """Run the stored SQL of a record against its project/env database.

        Statements that return rows are fetched and the transaction rolled
        back (read-only); other statements are committed via session.done().
        Returns (result_dict, '') or ({}, error_message).
        """
        sql_id = self.sql_id
        if isinstance(sql_id, str):
            sql_id = sql_id.strip().strip('"')
        if not sql_id:
            return {}, 'sqlId 为必传参数'
        sql_project = UpdateSqlProjectService.get_sql_project_by_id(self.session, sql_id)
        if not sql_project:
            return {}, '未查询到对应SQL记录'
        project = (sql_project.project or '').strip()
        run_env = (sql_project.run_env or '').strip().lower()
        # Config lookup tolerates different casing of the stored project key.
        project_config = EXECUTE_DB_CONFIG.get(project) or EXECUTE_DB_CONFIG.get(project.upper()) or EXECUTE_DB_CONFIG.get(project.lower())
        target_config = (project_config or {}).get(run_env)
        if not target_config:
            return {}, '未配置对应项目环境的数据库连接信息!'
        # Open a dedicated session on the target project/env database.
        execute_session = SqlSession(SqlSession.build_postgres_uri(
            target_config['host'],
            target_config['port'],
            target_config['user'],
            target_config['password'],
            target_config['database']
        ))
        try:
            # SECURITY NOTE(review): executes operator-stored SQL verbatim;
            # callers are assumed to be trusted internal users — confirm.
            result = execute_session.execute(sql_project.sql)
            if result.returns_rows:
                rows = []
                for row in result.fetchall():
                    row_dict = {key: self._format_value(value) for key, value in dict(row._mapping).items()}
                    rows.append(row_dict)
                # Read-only path: roll back so no transaction is left open.
                execute_session.session.rollback()
                execute_session.close()
                return {'sqlId': int(sql_id), 'rows': rows, 'rowCount': len(rows)}, ''
            # Write path: done() commits and returns the exception on failure.
            err = execute_session.done(close=False)
            if err:
                execute_session.close()
                return {}, f'执行SQL失败{err}'
            row_count = result.rowcount
            execute_session.close()
            return {'sqlId': int(sql_id), 'rowCount': row_count}, ''
        except Exception as e:
            execute_session.session.rollback()
            execute_session.close()
            logger.warning(f'execute_sql_project执行失败sql_id: {sql_id}, err: {e}')
            return {}, f'执行SQL失败{e}'

0
app/api/dao/__init__.py Normal file
View File

View File

@@ -0,0 +1,68 @@
# encoding: UTF-8
from ..model.updateSqlProjectModel import UpdateSqlProject
from logger import logger
class UpdateSqlProjectDao(object):
    """Data-access layer for the update_sql_project table (soft-delete aware)."""

    @staticmethod
    def get_sql_project_by_id(session, sql_id):
        """Return the live (is_delete == 0) record with this id, or None."""
        query = session.query(UpdateSqlProject).filter(
            UpdateSqlProject.id == int(sql_id), UpdateSqlProject.is_delete == 0
        )
        return query.first()

    @staticmethod
    def delete_sql_project_by_id(session, sql_id):
        """Soft-delete one record. Returns (id, '') or (0, error_message)."""
        affected = session.query(UpdateSqlProject).filter(
            UpdateSqlProject.id == int(sql_id), UpdateSqlProject.is_delete == 0
        ).update({'is_delete': 1})
        err = session.done(close=False)
        if err:
            logger.error('delete update_sql_project db失败sql_id: {}, err: {}'.format(sql_id, err))
            return 0, f'删除记录失败!{err}'
        if not affected:
            return 0, '未查询到对应记录!'
        return int(sql_id), ''

    @staticmethod
    def create_sql_project(session, add_info):
        """Insert a new record from a dict. Returns (new_id, '') or (0, error)."""
        if not isinstance(add_info, dict):
            logger.error('create_sql_project不支持其他类型。')
            return 0, '入参类型错误!'
        record = UpdateSqlProject(**add_info)
        session.add(record)
        err = session.done(close=False)
        new_id = record.id
        if err:
            logger.warning(f'create_sql_project新增记录失败{err}')
            return 0, f'新增记录失败!{err}'
        if not new_id:
            logger.warning('获取update_sql_project记录id失败')
            return 0, f'{add_info}获取update_sql_project记录id失败'
        return new_id, ''

    @staticmethod
    def get_sql_by_filters(session, filter_list, page=1, limit=20):
        """Paged query; returns (records_on_page, total_matching_count)."""
        # SQLAlchemy queries are generative, so the base query can be shared
        # between the page fetch and the count.
        base = session.query(UpdateSqlProject).filter(*filter_list).filter(
            UpdateSqlProject.is_delete == 0)
        records = (base
                   .order_by(UpdateSqlProject.created_time.desc())
                   .offset((int(page) - 1) * int(limit))
                   .limit(limit)
                   .all())
        total = base.count()
        return records, total

    @staticmethod
    def update_sql_project_by_id(session, sql_id, update_info):
        """Update one live record. Returns (id, '') or (0, error_message)."""
        affected = session.query(UpdateSqlProject).filter(
            UpdateSqlProject.id == int(sql_id), UpdateSqlProject.is_delete == 0
        ).update(update_info)
        err = session.done(close=False)
        if err:
            logger.error('update update_sql_project db失败sql_id: {}, update_info:{}, err: {}'.format(sql_id, update_info, err))
            return 0, f'更新记录失败!{err}'
        if not affected:
            return 0, '未查询到对应记录!'
        return int(sql_id), ''

View File

View File

@@ -0,0 +1,30 @@
from sqlalchemy import Column, Integer, String, TIMESTAMP, text
from sqlalchemy.ext.declarative import declarative_base
from common.sqlSession import to_dict
Base = declarative_base()
Base.to_dict = to_dict
class UpdateSqlProject(Base):
    """ORM mapping for table ``update_sql_project`` — stored data-seeding SQL."""
    __tablename__ = 'update_sql_project'

    # Surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True, comment='id')
    # The raw SQL statement to execute (max 500 chars).
    sql = Column(String(500), comment='sql语句')
    # Target runtime environment (EXECUTE_DB_CONFIG keys, e.g. st/dev/pre).
    run_env = Column(String(120), comment='运行环境')
    # Owning project key, used to pick the DB connection config.
    project = Column(String(120), comment='项目')
    # Optional grouping label for batches of statements.
    run_group = Column(String(120), comment='对sql进行分组')
    # Free-form note.
    remark = Column(String(300), comment='备注')
    # Username of the record's creator.
    creator = Column(String(300), comment='创建人')
    # Soft-delete flag: 0 = live, 1 = deleted.
    is_delete = Column(Integer, default=0, comment='0未删除1已删除')
    # Row timestamps are maintained server-side.
    created_time = Column(TIMESTAMP, server_default=text('CURRENT_TIMESTAMP'), nullable=True, comment='创建时间')
    modified_time = Column(
        TIMESTAMP,
        server_default=text('CURRENT_TIMESTAMP'),
        server_onupdate=text('CURRENT_TIMESTAMP'),
        nullable=True,
        comment='修改时间'
    )

    def __repr__(self):
        return '<update_sql_project %r>' % self.id

View File

View File

@@ -0,0 +1,37 @@
# encoding: UTF-8
from ..dao.updateSqlProjectDao import UpdateSqlProjectDao
from logger import logger
class UpdateSqlProjectService(object):
    """Thin service facade that delegates to UpdateSqlProjectDao."""

    @staticmethod
    def create_sql_project(session, add_info):
        """Insert a record; returns (new_id, err_msg)."""
        return UpdateSqlProjectDao.create_sql_project(session, add_info)

    @staticmethod
    def update_sql_project(session, sql_id, update_info):
        """Update a record; returns (sql_id, err_msg)."""
        return UpdateSqlProjectDao.update_sql_project_by_id(session, sql_id, update_info)

    @staticmethod
    def get_sql_list_by_filters(session, page_num=1, page_size=20, filter_list=None):
        """Paged query; returns (records, total_count)."""
        filters = filter_list if filter_list else []
        return UpdateSqlProjectDao.get_sql_by_filters(
            session, filters, int(page_num), int(page_size))

    @staticmethod
    def get_sql_project_by_id(session, sql_id):
        """Fetch one live record by id, or None."""
        return UpdateSqlProjectDao.get_sql_project_by_id(session, sql_id)

    @staticmethod
    def delete_sql_project_by_id(session, sql_id):
        """Soft-delete one record; returns (sql_id, err_msg)."""
        return UpdateSqlProjectDao.delete_sql_project_by_id(session, sql_id)

View File

43
app/api/utils/apiAuth.py Normal file
View File

@@ -0,0 +1,43 @@
# encoding: UTF-8
from logger import logger
import requests
import json
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
from urllib import parse
class apiAuth(object):
    """Obtain an SSO token by scripting the SSO login redirect chain.

    NOTE(review): all endpoints and credentials are empty strings here and
    must be filled in before use. TLS verification is disabled on every
    request (verify=False).
    """

    def __init__(self):
        # Endpoints/credentials intentionally blank; configure before use.
        self.ops_uri = ""
        self.showUsername = ""
        self.username = ""
        self.password = ""
        self.sso_login_url = ""
        self.redirect_url = ""

    def getSsoToken(self):
        """Log in via SSO and return the token parsed from a Set-Cookie header."""
        session = requests.session()
        post_data = dict()
        post_data['showUsername'] = self.showUsername
        post_data['username'] = self.username
        post_data['password'] = self.password
        session.post(url=self.sso_login_url,data=post_data,allow_redirects=True,verify=False)
        # Follow the redirect chain manually so intermediate headers are visible.
        resp = session.get(
            url=self.redirect_url ,
            allow_redirects=False ,
            verify=False)
        resp1 = session.get(
            url=resp.headers['Location'] ,
            allow_redirects=False ,
            verify=False)
        # NOTE(review): fragile — assumes the first '=' in Set-Cookie splits
        # name from token and that the token contains no '='; confirm format.
        ssoToken = resp1.headers["Set-Cookie"].split("=")[1]
        return ssoToken
if __name__ == '__main__':
    # Manual smoke test: fetch and print an SSO token.
    print(apiAuth().getSsoToken())

56
app/api/views.py Normal file
View File

@@ -0,0 +1,56 @@
# encoding: UTF-8
from flask import Blueprint, request
from common.apiResponse import ApiResponse
from .controller.updateSqlProjectController import UpdateSqlProjectController
api = Blueprint('api', __name__)
@api.route('/list', methods=['GET'])
def get_list():
    """GET /list: paged listing of stored SQL scenario records."""
    payload = UpdateSqlProjectController(request.args).query_smart_manage_sql_data()
    return ApiResponse.build_success(20000, data=payload)
@api.route('/create', methods=['POST'])
def create_sql_project():
    """POST /create: create a record, or update it when sqlId is present."""
    body = request.get_json() or {}
    sql_id, failure = UpdateSqlProjectController(body).create_sql_project()
    if failure:
        return ApiResponse.build_failure(40009, msg=failure)
    return ApiResponse.build_success(20000, data={'sqlId': sql_id})
@api.route('/detail', methods=['GET'])
def get_sql_project_detail():
    """GET /detail: fetch a single record by sqlId."""
    detail, failure = UpdateSqlProjectController(request.args).get_sql_project_detail()
    if failure:
        return ApiResponse.build_failure(40011, msg=failure)
    return ApiResponse.build_success(20000, data=detail)
@api.route('/delete', methods=['POST'])
def delete_sql_project():
    """POST /delete: soft-delete a record by sqlId."""
    body = request.get_json() or {}
    removed_id, failure = UpdateSqlProjectController(body).delete_sql_project()
    if failure:
        return ApiResponse.build_failure(40012, msg=failure)
    return ApiResponse.build_success(20000, data={'sqlId': removed_id})
@api.route('/execute', methods=['POST'])
def execute_sql_project():
    """POST /execute: run a stored SQL statement on its target database."""
    body = request.get_json() or {}
    outcome, failure = UpdateSqlProjectController(body).execute_sql_project()
    if failure:
        return ApiResponse.build_failure(40009, msg=failure)
    return ApiResponse.build_success(20000, data=outcome)

0
common/__init__.py Normal file
View File

61
common/apiResponse.py Normal file
View File

@@ -0,0 +1,61 @@
# encoding: UTF-8
from flask import make_response
import json
from decimal import Decimal
from const import RES_CODE
class ApiResponse(object):
    """Uniform JSON envelope {success, code, message, data} with CORS headers.

    Instances are serialized by json.dumps via the obj_2_json default hook.
    """

    def __init__(self):
        self.success = False
        self.code = ''
        self.message = ''
        self.data = {}

    @staticmethod
    def build_success(code=20000, message='', data=None):
        """Build a success response carrying *data*."""
        if data is None:
            data = {}
        response = ApiResponse()
        response.success = True
        response.code = code
        response.message = message
        response.data = data
        return response.cors_response(make_response(json.dumps(response, default=obj_2_json)))

    @staticmethod
    def build_failure(code, msg='', data=None):
        """Build a failure response; falls back to the RES_CODE text when msg is empty."""
        response = ApiResponse()
        if data is None:
            data = {}
        # BUGFIX: RES_CODE[code] raised KeyError for codes missing from the
        # table, crashing the error path; fall back to an empty message.
        response.message = msg if msg else RES_CODE.get(code, '')
        response.success = False
        response.code = code
        response.data = data
        return response.cors_response(make_response(json.dumps(response, default=obj_2_json)))

    @staticmethod
    def cors_response(res):
        """Attach permissive CORS headers to *res* and return it."""
        res.headers['Access-Control-Allow-Origin'] = '*'
        res.headers['Access-Control-Allow-Methods'] = 'GET,POST,OPTIONS'
        res.headers['Access-Control-Allow-Headers'] = 'x-requested-with,content-type'
        return res
def obj_2_json(obj):
    """json.dumps ``default`` hook: serialize ApiResponse objects and common DB types."""
    if isinstance(obj, dict):
        return obj
    # date/datetime both expose strftime; render with a uniform timestamp format.
    if hasattr(obj, 'strftime'):
        return obj.strftime('%Y-%m-%d %H:%M:%S')
    if isinstance(obj, Decimal):
        return float(obj)
    # Anything else is treated as an ApiResponse-like object.
    envelope = {
        'success': obj.success,
        'code': obj.code,
        'message': obj.message,
        'data': obj.data,
    }
    return envelope

98
common/cronRequest.py Normal file
View File

@@ -0,0 +1,98 @@
# -*- coding: utf-8 -*-
from const import STRESS_URI, QE_DOMAIN
from common.getRequest import Request
class CronRequest(object):
    """Client for the QE stress-platform schedule API, plus ad-hoc debug helpers."""

    def __init__(self, token):
        # token: platform access token, sent via the 'accesstoken' header.
        self.stress_api = STRESS_URI
        self.headers = {'accesstoken': token, 'Accept': '*/*', 'content-type': 'application/json;charset=UTF-8'}
        self.qe_domain = QE_DOMAIN

    def create(self, params):
        """Create a schedule; returns its id, or None when the call fails."""
        url = self.stress_api + '/back-end/stress/schedule/save'
        ret = Request.go('post', url, params, self.headers)
        if not ret:
            return
        return ret.get('id')

    def pause(self, jid):
        """Pause the schedule with id *jid*."""
        url = self.stress_api + '/back-end/stress/schedule/pause'
        params = [jid]
        Request.go('post', url, params, self.headers)

    def resume(self, jid):
        """Resume the schedule with id *jid*."""
        url = self.stress_api + '/back-end/stress/schedule/resume'
        params = [jid]
        Request.go('post', url, params, self.headers)

    def remove(self, jid):
        """Delete the schedule with id *jid*."""
        url = self.stress_api + '/back-end/stress/schedule/delete'
        params = [jid]
        Request.go('post', url, params, self.headers)

    def update(self, req_params):
        """Update an existing schedule definition."""
        url = self.stress_api + '/back-end/stress/schedule/update'
        Request.go('post', url, req_params, self.headers)

    # NOTE(review): the methods below print to stdout and hit hard-coded
    # internal hosts — they look like manual debugging utilities, not
    # production code; candidates for removal.

    def test(self,req_params):
        url = self.stress_api + '/aida/keyword/run'
        print(url)
        b = Request.go('post', url, req_params, self.headers)
        print(b)

    def scrapy(self):
        url = self.stress_api + '/data/detail/scrapy'
        req_params = {"team": "USER", "fileName": "", "username": "", "password": ""}
        b = Request.go('post', url, req_params, self.headers)
        print(b)

    def detail(self):
        url = self.stress_api + '/detail/list'
        req_params = {"team": "USER", "fileName": "", "username": "", "password": ""}
        b = Request.go('get', url, req_params, self.headers)
        print(b)

    def run(self):
        # Hard-coded internal host kept for manual debugging.
        url = "https://172.19.28.91:8088/aida//it/api/create_dialog_by_user"
        # url = self.stress_api + '/create_dialog_by_user'
        req_params = {"user_id":597021,"req_data":"新增用户","issue_id":41}
        b = Request.go('get', url, req_params, self.headers)
        print(b)

    def run_sim(self,req_params):
        url = "http://172.19.28.91:5012/api/aida/keyword/run"
        # url = "http://10.250.201.236:5012/api/aida/keyword/run"
        # url = self.stress_api + '/create_dialog_by_user'
        b = Request.go('post', url, req_params, self.headers)
        print(b)

    def run_xiao(self,req_params):
        url = "https://qe.bg.huohua.cn/back-end/it/api/list_incomplete_special_by_teams"
        # url = "https://qe.bg.huohua.cn/back-end/it/api/get_team_server"
        b = Request.go('post', url, req_params, self.headers)
        print(b)
if __name__ == '__main__':
    # SECURITY NOTE(review): a real platform JWT is committed in source
    # below — it should be revoked and supplied via environment/secret
    # storage instead of being hard-coded.
    test = CronRequest(token="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhY2NvdW50X2lkIjoxNDI4MSwidXNlcl9pZCI6MTQyODEsInVzZXJfbmFtZSI6InFpYW94aW5qaXUiLCJzY29wZSI6WyJzZXJ2ZXIiXSwibmFtZSI6Iuiwr-aWsOS5hSIsImV4cCI6MTY5ODc2MzcwMCwiYXV0aG9yaXRpZXMiOlsiUk9MRV9VU0VSIl0sImp0aSI6IjZhMTg1ZWFlLTEyOGQtNDg5Yy05N2Q0LWRlOTM2NzA4ZGZmMSIsImVtYWlsIjoicWlhb3hpbmppdUBzcGFya2VkdS5jb20iLCJjbGllbnRfaWQiOiJlZmZlY3QifQ.L5WeZwyctUl-kto0rejY3PC3J1O5sksRZcA-0yQJQSg")
    # a={'method_name': 'logic_public_add_user_recharge','request_parameter': '{ "phone": "", "courseId": "", "classHour": 100}', 'request_id': 1199}
    # a={'method_name': 'kw_tmo_creat_lesson_classroom','request_parameter': '{ "classesId": 1200092722, "classroom_number": 1}', 'request_id': 1199}
    # a={'method_name': 'logic_cc_create_new_leads', 'request_parameter': '{ "phone": "", "subject": ""}', 'request_id': 1199}
    # a={'method_name': 'kw_get_class_student','request_parameter': '{ "class_id": 500911139}', 'request_id': 1199}
    import json
    dict_request = {'teacher_id': 'default', 'course_id': 851732, 'start_date': 'default', 'union_flag': 0, 'schedule_info_list': 'default', 'systemUserId': 10697}
    a={'method_name': 'htm_public_classes_create_class','request_parameter': '{"teacher_id": "0", "course_id": 172, "start_date": "default", "union_flag": 0, "weekList": ["default"], "timeList": ["default"], "systemUserId": 586669}', 'request_id': 1199}
    # a={'method_name': 'student_finish_classroom','request_parameter': '{"classroom_code": "CR2310500625595", "student_user_id": 1882444, "systemUserId": 10697}', 'request_id': 1199}
    # a={'method_name': 'kw_create_test_case_robot','request_parameter': '{"systemUserId": "14263", "msg": "我有一个用例名称为testcase001用例步骤如下\n步骤1新增一个用户\n步骤2再新增一个用户\n步骤3为步骤2的用户购买逻辑思维套餐\n步骤4新建一个班级\n步骤5补差升级\n步骤6将步骤2的用户加入步骤4班级\n步骤7将步骤1的用户加入步骤4班级\n步骤8验证补差升级\n请帮忙生成自动化测试用例,请根据以上步骤结合提供的函数和函数返回信息如果有步骤没有匹配到函数则填写NOKEYWORDS代替生成一个robotframework的自动化测试用例每个步骤加上注释"}', 'request_id': 1199}
    # test.test(a)
    b = {'teacher_id': '0', 'course_id': 10101, 'start_date': 'default', 'union_flag': 0, 'weekList': ['default'], 'timeList': ['default'], 'systemUserId': 586669}
    print(json.dumps(b))
    # test.detail()
    # test.scrapy()
    # test.run()
    # test.run_sim(a){"project_plan_id":"2282","team":""}
    sss = {"project_id":2282,"project_plan_id":"2282","team":""}
    test.run_xiao(req_params=sss)

36
common/feishuMessage.py Normal file
View File

@@ -0,0 +1,36 @@
import requests
import json
class FeiShuMessage:
    """Send messages to a Feishu (Lark) group chat through a webhook bot."""

    def __init__(self):
        self.headers = {'Content-Type': 'application/json; charset=utf-8'}
        self.webhook = "https://open.feishu.cn/open-apis/bot/v2/hook/180fa48e-1474-448e-a3d5-1a530f6ca689"

    def send_message(self, msg, url=None):
        """POST *msg* to *url* (default: the built-in webhook); True on HTTP 200."""
        target = url if url else self.webhook
        res = requests.post(target, headers=self.headers, json=msg, verify=False)
        return res.status_code == 200

    def is_valid_key_url(self, f_url):
        """Probe *f_url* with an empty text message to check it is a usable
        Feishu keyword webhook. Returns (ok, message)."""
        probe = {"msg_type": "text", "content": {"text": ""}}
        res = requests.post(f_url, headers=self.headers, json=probe, verify=False)
        if res.status_code != 200:
            return False, '网络异常请稍后重试'
        # Code 19024 in the reply marks a keyword-protected, valid webhook —
        # presumably "keyword check failed" for the empty probe; confirm.
        if json.loads(res.text)['code'] == 19024:
            return True, ''
        return False, '不是有效的飞书关键字链接,请检查!'
if __name__ == '__main__':
    # Manual smoke test of the webhook validity probe.
    bot = FeiShuMessage()
    hook = "https://open.feishu.cn/open-apis/bot/v2/hook/180fa48e-1474-448e-a3d5-1a530f6ca689"
    print(bot.is_valid_key_url(hook))

41
common/getRequest.py Normal file
View File

@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
import requests
import json
from requests.exceptions import ConnectionError
from logger import logger
class Request(object):
    """Thin wrapper over requests implementing the QE-platform response convention."""

    @classmethod
    def go(cls, method, url, params, headers=None, noFormat=False):
        """Send a GET/POST request and unwrap the platform response.

        Args:
            method: 'get' or 'post' (POST bodies are JSON-encoded).
            noFormat: when True, return the raw decoded JSON instead of
                unwrapping the {success, code, data} envelope.

        Returns:
            The 'data' field on success (or the raw JSON with noFormat),
            otherwise None after logging the failure.
        """
        try:
            if method == 'get':
                response = requests.get(url=url, params=params, headers=headers, timeout=200)
            elif method == 'post':
                response = requests.post(url=url, data=json.dumps(params), headers=headers, timeout=200)
            else:
                logger.error(f'暂不支持{method}方法')
                return
        except ConnectionRefusedError:
            logger.error(f'服务请求失败:{url}')
            return
        except ConnectionError:
            logger.error(f'服务无法链接: {url}')
            return
        if response.status_code != 200:
            logger.error(f'返回码不等于200请检查服务{response.status_code}, {response.text}')
            return
        # BUGFIX: a non-JSON 200 body used to raise an unhandled ValueError
        # from response.json(); treat it as a failed call instead.
        try:
            resp_json = response.json()
        except ValueError:
            logger.error(f'返回内容不是合法JSON: {url}, {response.text}')
            return
        # noFormat: skip envelope validation and return the whole payload.
        if noFormat:
            return resp_json
        # Validate the QE-platform common envelope.
        if resp_json.get('success') or resp_json.get('code') == 20000:
            return resp_json.get('data')
        logger.error(resp_json)
        return

24
common/getUserInfo.py Normal file
View File

@@ -0,0 +1,24 @@
from common.getRequest import Request
from const import STRESS_URI
class UserInfo(object):
    """Resolve user information from the QE stress platform."""

    @staticmethod
    def get_user_info(access_token, url_prefix=None, info='userId'):
        """Return the *info* field of the token's user, or None on failure."""
        path = "/back-end/stress/user/info"
        base = url_prefix if url_prefix else STRESS_URI
        result = Request.go(method="get",
                            url=base + path,
                            params=None,
                            headers={"accessToken": access_token})
        return result.get(info) if result else None

    @staticmethod
    def get_user_info_by_user_id(access_token, user_id, info):
        """Return the *info* field for an arbitrary user id, or None on failure."""
        path = "/back-end/stress/user/infoFromId"
        result = Request.go(method="get",
                            url=STRESS_URI + path,
                            params={'userId': user_id},
                            headers={"accessToken": access_token})
        return result.get(info) if result else None

98
common/sqlSession.py Normal file
View File

@@ -0,0 +1,98 @@
# 创建连接相关
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from urllib.parse import quote_plus as urlquote
from const import sparkatp_sql_uri
from logger import logger
_ENGINE_CACHE = {}
"""
sql操作
排序order_by(ChartsName.column.desc()/asc())
limit: .offset(n)过滤前面n条数据 .limit(n)
count: .count()计数
是否存在is_exist = session.query(exists().where(Book.id > 10)).scalar()
or: .filter(or_(Chart.column == x, Chart.column > y)).all()
one: .one()只获取一条,如不存在或存在多条都会报错
first: 通过主键获取记录 filter(**).first()
"""
class SqlSession:
    """Thin wrapper around a SQLAlchemy Session with a per-URI engine cache."""

    def __init__(self, sql_uri=sparkatp_sql_uri):
        # Defaults to the platform's own Postgres instance (see const.py).
        self.sql_uri = sql_uri
        self._session = self.get_session()

    @staticmethod
    def build_postgres_uri(host, port, user, password, database):
        # URL-quote the password so special characters survive in the DSN.
        return f"postgresql+psycopg2://{user}:{urlquote(str(password))}@{host}:{port}/{database}"

    def get_session(self):
        """Return a new Session bound to a cached (module-level, per-URI) engine."""
        engine = _ENGINE_CACHE.get(self.sql_uri)
        if engine is None:
            engine = create_engine(
                self.sql_uri,
                pool_size=5,
                max_overflow=10,
                pool_pre_ping=True,  # validate pooled connections before use
                pool_recycle=1800,
                pool_timeout=30,
                connect_args={
                    'connect_timeout': 20,
                    'options': '-c timezone=Asia/Shanghai'
                }
            )
            _ENGINE_CACHE[self.sql_uri] = engine
        Session = sessionmaker(bind=engine)
        session = Session()
        return session

    def query(self, obj):
        # Delegate to Session.query.
        return self._session.query(obj)

    def add(self, added):
        self._session.add(added)

    def add_all(self, added_list):
        # Only list input is accepted; anything else is logged and ignored.
        if isinstance(added_list, list):
            self._session.add_all(added_list)
        else:
            logger.warning('只能传递list')

    def flush(self):
        self._session.flush()

    def commit(self):
        self._session.commit()

    def close(self):
        self._session.close()

    def execute(self, sql):
        # Wrap the raw SQL string in text() before execution.
        return self._session.execute(text(sql))

    def done(self, close=True):
        """Finish a write operation: commit (and optionally close).

        On failure the transaction is rolled back and the exception object
        is returned (None on success) — callers check the return value.
        """
        try:
            self.commit()
            if close:
                self.close()
        except Exception as e:
            logger.warning(e)
            self._session.rollback()
            return e

    @property
    def session(self):
        # Expose the underlying SQLAlchemy Session.
        return self._session
def to_dict(self):
    """Serialize a mapped instance into a plain dict keyed by column name.

    Missing attributes default to None. Attached to the declarative Base as
    a method elsewhere in the project.
    """
    result = {}
    for column in self.__table__.columns:
        result[column.name] = getattr(self, column.name, None)
    return result

85
const.py Normal file
View File

@@ -0,0 +1,85 @@
# encoding: UTF-8
import os
from urllib.parse import quote_plus as urlquote
from urllib.parse import quote
# dev environment
# BE_URL = '127.0.0.1:6080'
# online (production) environment
BE_URL = '0.0.0.0:6080'
BASEDIR = os.path.dirname(os.path.abspath(__file__))
# PROJDIR = os.path.dirname(BASEDIR)
LOG_DIR = os.path.join(BASEDIR, 'logs')
# Response codes -> default human-readable messages (used by ApiResponse.build_failure).
RES_CODE = {
    40001: 'URL不正确请检查',
    40002: '不支持该请求方法!',
    40003: '参数有误!',
    40004: 'header错误',
    40005: 'user_id不能为空! ',
    40006: '构建任务遇到问题, 请稍后重试! ',
    40007: '获取下拉框列表失败!',
    40008: '获取接口列表失败!',
    40009: '新增场景失败!',
    40010: '更新用例编号失败!',
    40011: '获取场景信息失败!',
    40012: '更新场景失败!',
    40013: 'scene_id不能为空!'
}
# SECURITY NOTE(review): database credentials below are committed in plain
# text; they should be rotated and loaded from environment/secret storage.
sparkatp_sql_uri = f'postgresql+psycopg2://postgres:{urlquote("dffa3866-dac8-49b1-a59e-725302bdfa4a")}@124.220.32.45:18366/postgres'
# Per-project, per-environment Postgres connection settings used by
# UpdateSqlProjectController.execute_sql_project().
EXECUTE_DB_CONFIG = {
    'ZHYY': {
        'st': {
            'host': '124.220.32.45',
            'port': 18666,
            'user': 'postgres',
            'password': '89c75b17-1738-4b7d-b651-4c65a5a662ab',
            'database': 'smart_management_st'
        },
        'dev': {
            'host': '124.220.32.45',
            'port': 18566,
            'user': 'postgres',
            'password': 'f267abd8-7005-472f-8cef-c1738c691c6c',
            'database': 'smart_management_st'
        },
        'pre': {
            'host': '8.137.12.32',
            'port': 8096,
            'user': 'sm_test_user',
            'password': 'Test@736141',
            'database': 'smart_management_pre'
        }
    },
    'DLZ': {
        'st': {
            'host': '124.220.32.45',
            'port': 18666,
            'user': 'joyhub',
            'password': 'e364be29-6089-4610-97d5-0037a28d0703',
            'database': 'joyhub_website_st'
        }
    }
}
# MySQL database (original config kept for reference)
# sparkatp_sql_uri = 'mysql+pymysql://qa-dev:jaeg3SCQt0@mysql.qa.huohua.cn/sparkatp?charset=utf8mb4'
# password = urlquote("peppa@test")
USE_TEAM = ["ZHYY", "DLZ", "JOYHUB", "OA", "APP"]
# dev environment endpoint for user_info
# STRESS_URI = 'http://stress-api.qa.huohua.cn'
# prod environment endpoint for user_info
# STRESS_URI = 'http://stress-api.bg.huohua.cn'
STRESS_URI = 'https://qe.bg.huohua.cn'
# STRESS_URI = ' http://172.19.24.100:5012/api'
# dev environment qe domain
# QE_DOMAIN = 'http://qe.qa.huohua.cn'
# prod environment qe domain
QE_DOMAIN = 'https://qe.bg.huohua.cn'
PASSWORD = quote('AcUVeRb8lN')
REDIS_URL = "redis://:{}@redis.qa.cn:6379/30".format(PASSWORD)

15
gunicorn.conf.py Normal file
View File

@@ -0,0 +1,15 @@
# encoding: UTF-8
from const import BE_URL
workers = 1  # number of worker processes; tune to the site's traffic
bind = BE_URL
threads = 2  # threads per worker; two is enough for now
debug = False
reload = False
loglevel = 'debug'
pidfile = "logs/gunicorn.pid"
accesslog = "logs/access.log"  # every API call is recorded in access.log
errorlog = "logs/debug.log"  # unhandled errors are written to debug.log
timeout = 300  # per-request timeout (seconds)
daemon = True  # daemonize; set False to watch logs in an IDE/terminal, True on servers

39
logger.py Normal file
View File

@@ -0,0 +1,39 @@
import logging
import os
from logging.handlers import TimedRotatingFileHandler
from const import LOG_DIR
class FunctionalTestsLogger(logging.Logger):
    """A Logger whose ``critical`` both logs and aborts by raising."""

    def critical(self, msg, *args, **kwargs):
        """Log *msg* at CRITICAL level, then raise it as an Exception."""
        super().critical(msg, *args, **kwargs)
        raise Exception(msg)
logging.setLoggerClass(FunctionalTestsLogger)
logger = logging.getLogger(FunctionalTestsLogger.__name__)
logger.setLevel(logging.DEBUG)
LOG_FMT = logging.Formatter("%(asctime)s %(filename)-24s[:%(lineno)-4d] %(levelname)-8s %(message)s")
# BUGFIX: LOG_DIR (/logs/) is git-ignored, so it does not exist on a fresh
# checkout and TimedRotatingFileHandler raised FileNotFoundError at import.
os.makedirs(LOG_DIR, exist_ok=True)
# File handler: rotate at midnight, one file per day.
fh = TimedRotatingFileHandler(
    filename=os.path.join(LOG_DIR, 'it-log'),
    when="MIDNIGHT",
    encoding='utf-8')
fh.setLevel(logging.DEBUG)
fh.setFormatter(LOG_FMT)
logger.addHandler(fh)
# Console handler mirrors everything to stderr.
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(LOG_FMT)
logger.addHandler(ch)
# NOTE(review): basicConfig() installs a root handler, and this logger still
# propagates to root — console lines may appear twice; confirm intent.
logging.basicConfig()

16
manage.py Normal file
View File

@@ -0,0 +1,16 @@
# encoding: UTF-8
from flask_cors import CORS
from flask import make_response, jsonify, request, redirect
from app import create_app
app = create_app()
# Allow cross-origin requests from any origin on every route.
CORS(app, resources=r'/*')

# BUGFIX: app.run() previously executed at import time, so it also fired
# (and blocked) when gunicorn imported manage:app per gunicorn.conf.py.
# The Flask dev server now starts only when this module is run directly.
if __name__ == '__main__':
    app.run(host="0.0.0.0", port=5010, debug=True)
def cors_response(res):
    """Wrap *res* in a JSON response carrying permissive CORS headers."""
    response = make_response(jsonify(res))
    headers = response.headers
    headers['Access-Control-Allow-Origin'] = '*'
    headers['Access-Control-Allow-Methods'] = '*'
    headers['Access-Control-Allow-Headers'] = 'x-requested-with,content-type'
    return response

10
requirements.txt Normal file
View File

@@ -0,0 +1,10 @@
gunicorn~=20.1.0
Flask~=2.0.2
Flask-Cors~=3.0.10
SQLAlchemy~=1.4.35
PyMySQL~=0.10.0
python-jenkins~=1.7.0
requests~=2.26.0
Flask-Docs~=0.6.4
flask_redis~=0.4.0
jira~=3.0.1

175
resources/config.xml Normal file
View File

@@ -0,0 +1,175 @@
<?xml version="1.1" encoding="UTF-8" standalone="no"?><project>
<actions/>
<description>调度任务公共job</description>
<keepDependencies>false</keepDependencies>
<properties>
<jenkins.model.BuildDiscarderProperty>
<strategy class="hudson.tasks.LogRotator">
<daysToKeep>30</daysToKeep>
<numToKeep>100</numToKeep>
<artifactDaysToKeep>-1</artifactDaysToKeep>
<artifactNumToKeep>-1</artifactNumToKeep>
</strategy>
</jenkins.model.BuildDiscarderProperty>
<com.sonyericsson.jenkins.plugins.bfa.model.ScannerJobProperty plugin="build-failure-analyzer@2.0.0">
<doNotScan>false</doNotScan>
</com.sonyericsson.jenkins.plugins.bfa.model.ScannerJobProperty>
<com.chikli.hudson.plugin.naginator.NaginatorOptOutProperty plugin="naginator@1.18.1">
<optOut>false</optOut>
</com.chikli.hudson.plugin.naginator.NaginatorOptOutProperty>
<com.sonyericsson.rebuild.RebuildSettings plugin="rebuild@1.32">
<autoRebuild>false</autoRebuild>
<rebuildDisabled>false</rebuildDisabled>
</com.sonyericsson.rebuild.RebuildSettings>
<hudson.model.ParametersDefinitionProperty>
<parameterDefinitions>
<hudson.model.StringParameterDefinition>
<name>special_env</name>
<description>独立环境</description>
<defaultValue>qa</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>test_case_path</name>
<description>目录和robot都可以</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>test_case_id</name>
<description>需要构建的用例编号多个用逗号隔开默认空构建所有。例query_classroom-1001-正常请求有数据,query_classroom-1002-正常请求无数据</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>inculde</name>
<description>需要构建的用例tag多个用逗号隔开默认空构建所有。例p0,p1备注tag中不能出现关键字大写AND、OR、NOT可用小写platform-27418</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>exculde</name>
<description>不需要构建的用例tag多个用逗号隔开默认空不过滤。例norun,del</description>
<defaultValue>norun</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>rerun</name>
<description>是否需要二次构建失败用例。true/false</description>
<defaultValue>true</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>git_path</name>
<description>构建代码地址</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>team</name>
<description>组名</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>team_code_path</name>
<description>代码分支</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>build_info_id</name>
<description>本次构建对应数据库id</description>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>is_use_db</name>
<description>使用数据库中参数构建qe平台使用</description>
<defaultValue>0</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
</parameterDefinitions>
</hudson.model.ParametersDefinitionProperty>
</properties>
<scm class="org.jenkinsci.plugins.multiplescms.MultiSCM" plugin="multiple-scms@0.8">
<scms>
<hudson.plugins.git.GitSCM plugin="git@4.3.0">
<configVersion>2</configVersion>
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>${git_path}</url>
<credentialsId>a670722b-96ec-449f-a2dc-6e6676bf8dbc</credentialsId>
</hudson.plugins.git.UserRemoteConfig>
</userRemoteConfigs>
<branches>
<hudson.plugins.git.BranchSpec>
<name>*/${team_code_path}</name>
</hudson.plugins.git.BranchSpec>
</branches>
<doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
<gitTool> git-1.8.3.1</gitTool>
<submoduleCfg class="list"/>
<extensions>
<hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
<relativeTargetDir>./${team}</relativeTargetDir>
</hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
</extensions>
</hudson.plugins.git.GitSCM>
<hudson.plugins.git.GitSCM plugin="git@4.3.0">
<configVersion>2</configVersion>
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>https://git.bg.huohua.cn/h2asatp/base_framework.git</url>
<credentialsId>a670722b-96ec-449f-a2dc-6e6676bf8dbc</credentialsId>
</hudson.plugins.git.UserRemoteConfig>
</userRemoteConfigs>
<branches>
<hudson.plugins.git.BranchSpec>
<name>*/master</name>
</hudson.plugins.git.BranchSpec>
</branches>
<doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
<gitTool> git-1.8.3.1</gitTool>
<submoduleCfg class="list"/>
<extensions>
<hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
<relativeTargetDir>./base_framework</relativeTargetDir>
</hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
</extensions>
</hudson.plugins.git.GitSCM>
</scms>
</scm>
<assignedNode>SparkATP</assignedNode>
<canRoam>false</canRoam>
<disabled>false</disabled>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<triggers/>
<concurrentBuild>false</concurrentBuild>
<builders>
<hudson.tasks.Shell>
<command>python3 $WORKSPACE/../sparkatp-scripts/case_runner10.py -w $WORKSPACE -c $WORKSPACE/$test_case_path -t "$test_case_id" -i "$inculde" -e "$exculde" -r "$rerun" -env "$special_env" -team "$team" -b_id "$build_info_id" -b_url "$BUILD_URL" -is_db "$is_use_db"</command>
<configuredLocalRules/>
</hudson.tasks.Shell>
</builders>
<publishers>
<hudson.plugins.robot.RobotPublisher plugin="robot@2.1.2">
<outputPath>$WORKSPACE/Report/ci_out/out</outputPath>
<reportFileName>report.html</reportFileName>
<logFileName>log.html</logFileName>
<outputFileName>output.xml</outputFileName>
<disableArchiveOutput>false</disableArchiveOutput>
<passThreshold>100.0</passThreshold>
<unstableThreshold>90.0</unstableThreshold>
<otherFiles>
<string/>
</otherFiles>
<enableCache>true</enableCache>
<onlyCritical>true</onlyCritical>
</hudson.plugins.robot.RobotPublisher>
</publishers>
<buildWrappers>
<hudson.plugins.ws__cleanup.PreBuildCleanup plugin="ws-cleanup@0.38">
<deleteDirs>false</deleteDirs>
<cleanupParameter/>
<externalDelete/>
<disableDeferredWipeout>false</disableDeferredWipeout>
</hudson.plugins.ws__cleanup.PreBuildCleanup>
<hudson.plugins.timestamper.TimestamperBuildWrapper plugin="timestamper@1.11.3"/>
</buildWrappers>
</project>

View File

@@ -0,0 +1,122 @@
<?xml version="1.1" encoding="UTF-8" standalone="no"?><project>
<actions/>
<description>支持抓取hhi环境版本</description>
<keepDependencies>false</keepDependencies>
<properties>
<jenkins.model.BuildDiscarderProperty>
<strategy class="hudson.tasks.LogRotator">
<daysToKeep>30</daysToKeep>
<numToKeep>50</numToKeep>
<artifactDaysToKeep>-1</artifactDaysToKeep>
<artifactNumToKeep>-1</artifactNumToKeep>
</strategy>
</jenkins.model.BuildDiscarderProperty>
<com.sonyericsson.jenkins.plugins.bfa.model.ScannerJobProperty plugin="build-failure-analyzer@2.0.0">
<doNotScan>false</doNotScan>
</com.sonyericsson.jenkins.plugins.bfa.model.ScannerJobProperty>
<com.chikli.hudson.plugin.naginator.NaginatorOptOutProperty plugin="naginator@1.18.1">
<optOut>false</optOut>
</com.chikli.hudson.plugin.naginator.NaginatorOptOutProperty>
<com.sonyericsson.rebuild.RebuildSettings plugin="rebuild@1.32">
<autoRebuild>false</autoRebuild>
<rebuildDisabled>false</rebuildDisabled>
</com.sonyericsson.rebuild.RebuildSettings>
<hudson.model.ParametersDefinitionProperty>
<parameterDefinitions>
<hudson.model.StringParameterDefinition>
<name>input_team</name>
<description>抓取对应组的swagger地址</description>
<defaultValue>None</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.ChoiceParameterDefinition>
<name>is_from_db</name>
<description>新服务or新增group 传False
备注如果新服务或者对应的swagger地址下有新增的group请选择False如果没有使用默认值即可</description>
<choices class="java.util.Arrays$ArrayList">
<a class="string-array">
<string>True</string>
<string>False</string>
</a>
</choices>
</hudson.model.ChoiceParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>jira_id</name>
<description>如果swagger地址需要JiraID请填写PLATFORM-27598</description>
<defaultValue>None</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.StringParameterDefinition>
<name>server_name</name>
<description>独立环境server_name名字 如 peppa-classes-server</description>
<defaultValue>None</defaultValue>
<trim>false</trim>
</hudson.model.StringParameterDefinition>
<hudson.model.ChoiceParameterDefinition>
<name>access_type</name>
<description>抓取环境变量
传None表示从数据库读取抓取新服务or新group时不能填None
传hh表示抓取hh环境
传hhi表示抓取hhi环境
传all表示同时抓取hh和hhi环境会先抓hh
传allschool表示抓取allschool</description>
<choices class="java.util.Arrays$ArrayList">
<a class="string-array">
<string>hh</string>
<string>None</string>
<string>hhi</string>
<string>all</string>
<string>allschool</string>
</a>
</choices>
</hudson.model.ChoiceParameterDefinition>
</parameterDefinitions>
</hudson.model.ParametersDefinitionProperty>
</properties>
<scm class="hudson.plugins.git.GitSCM" plugin="git@4.3.0">
<configVersion>2</configVersion>
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>https://git.bg.huohua.cn/h2asatp/base_framework.git</url>
<credentialsId>a670722b-96ec-449f-a2dc-6e6676bf8dbc</credentialsId>
</hudson.plugins.git.UserRemoteConfig>
</userRemoteConfigs>
<branches>
<hudson.plugins.git.BranchSpec>
<name>*/master</name>
</hudson.plugins.git.BranchSpec>
</branches>
<doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
<gitTool> git-1.8.3.1</gitTool>
<submoduleCfg class="list"/>
<extensions>
<hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
<relativeTargetDir>./base_framework</relativeTargetDir>
</hudson.plugins.git.extensions.impl.RelativeTargetDirectory>
</extensions>
</scm>
<assignedNode>SparkATP</assignedNode>
<canRoam>false</canRoam>
<disabled>false</disabled>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<triggers/>
<concurrentBuild>false</concurrentBuild>
<builders>
<hudson.tasks.Shell>
<command>export PYTHONPATH=$WORKSPACE
cd $WORKSPACE/base_framework/platform_tools/Interface_hunter &amp;&amp; python3 swagger_job_by_url4.py input_team=$input_team is_from_db=$is_from_db jira_id=$jira_id server_name=$server_name access_type=$access_type</command>
<configuredLocalRules/>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers>
<hudson.plugins.ws__cleanup.PreBuildCleanup plugin="ws-cleanup@0.38">
<deleteDirs>false</deleteDirs>
<cleanupParameter/>
<externalDelete/>
<disableDeferredWipeout>false</disableDeferredWipeout>
</hudson.plugins.ws__cleanup.PreBuildCleanup>
</buildWrappers>
</project>