
```python
# coding=utf-8
# Purpose: export the Teambition bug list to Excel; specified bug boards (tabs) can optionally be ignored
import requests
import json
import time
from openpyxl import Workbook


class TB:
    def __init__(self):
        self.get_current_time = int(time.time())
        # request headers (auth value redacted in the original post)
        self.headers = {"password": "password"}
        # ignore list: stage ids of bug boards to skip
        self.ignore = []
        # initial url (redacted in the original post)
        self.initial_url = "password_url={0}"

    def request_parse_ret_json(self, url):
        # request the url, parse the response body and return it as JSON
        response = requests.get(url, headers=self.headers)
        json_data = json.loads(response.text)
        return json_data

    def get_sheel_list(self):
        # 1. return all tabs (stages) under the QA workflow
        self.sheel_list = []
        self.sheel_name = []
        sheel_url = self.initial_url.format(self.get_current_time)
        json_data = self.request_parse_ret_json(sheel_url)
        url_list = json_data[2]["hasStages"]
        for url in url_list:
            self.sheel_list.append(url["_id"])
        for url_list_name in url_list:
            self.sheel_name.append(url_list_name["name"])
        # print the mapping of tab names to stage ids
        for i, z in zip(self.sheel_name, self.sheel_list):
            print(i + " " + z)
        # optionally ignore specified bug boards
        for x in self.ignore:
            if x in self.sheel_list:
                self.sheel_list.remove(x)
        print("Fetched {0} QA workflow tabs, ignored {1} tabs".format(len(self.sheel_list), len(self.ignore)))
        return self.sheel_list

    def get_bug_project(self):
        # 2. request the bug board under each tab and build the parent-task query urls
        self.bug_project = []
        url_text = "password"
        for sheel_url in self.sheel_list:
            _stageid = sheel_url
            text = f"?filter=((_stageId={_stageid}%20AND%20_projectId=5a16372c7f0e99496df95127%20AND%20_tasklistId=5f3ced22b3e46c00442e2816%20AND%20isDone=false%20AND%20(taskLayer%20IN%20(0)%20OR%20isTopInProject%20=%20true))%20OR%20(relationsStageId=5f3e43e3c536a800441fb514%20AND%20relationsProjectId=5a16372c7f0e99496df95127%20AND%20relationsTasklistId=5f3ced22b3e46c00442e2816%20AND%20isDone=false%20AND%20(taskLayer%20IN%20(0)%20OR%20isTopInProject%20=%20true)))%20ORDER%20BY%20dueDate%20ASC&pageSize=5000&_={self.get_current_time}"
            new_url = url_text + text
            self.bug_project.append(new_url)
        print("Fetched all parent bug entries")
        return self.bug_project

    def get_all_bug(self, write_excel):
        # find every child bug via its parent task
        all_bug_id = []
        print("bug_project: ", self.bug_project)
        # collect all bug ids
        for url_list_i in self.bug_project:
            json_data = self.request_parse_ret_json(url_list_i)
            json_data = json_data["result"]
            for i in json_data:
                all_bug_id.append(i["_id"])
        for i in all_bug_id:
            # request the details of every bug
            request_url_text = f"https://www.teambition.com/api/tasks/{i}?_={self.get_current_time}"
            json_data = self.request_parse_ret_json(request_url_text)
            print("Fetched bug info:", json_data)
            # extract bug_id, text, executor, isDone
            bug_id = json_data["uniqueId"]
            text = json_data["content"]
            executor = json_data["executor"]
            isDone = json_data["isDone"]
            if isinstance(executor, dict):
                executor = executor["name"]
            else:
                executor = "None"
            write_excel(bug_id, text, executor, isDone)
            print("bug_id:", json_data["uniqueId"], "parent task:", json_data["content"],
                  "executor:", executor, "done:", json_data["isDone"])
            # --------- customizable crawl rule ---------
            # If a parent task is not done, fetch it and all of its subtasks; skip completed
            # parent tasks and their subtasks (Teambition only lets a parent task be completed
            # once all of its subtasks are done).
            if json_data["subtasks"] and not json_data["isDone"]:
                # fetch all subtasks
                # if json_data["subtasks"]:
                for subtask in json_data["subtasks"]:
                    bug_id = subtask["uniqueId"]
                    text = subtask["content"]
                    executor = subtask["executor"]
                    isDone = subtask["isDone"]
                    if isinstance(executor, dict):
                        executor = executor["name"]
                    else:
                        executor = "None"
                    write_excel(bug_id, text, executor, isDone)
                    print("bug_id:", subtask["uniqueId"], "subtask:", subtask["content"],
                          "executor:", executor, "done:", subtask["isDone"])
            else:
                print("No subtasks")


class Excel:
    def __init__(self):
        self.wb = Workbook()
        self.sheet = self.wb.active
        self.aline, self.bline, self.cline, self.dline = 1, 1, 1, 1
        # header row
        self.sheet["A1"].value = "bug_id"
        self.sheet["B1"].value = "text"
        self.sheet["C1"].value = "executor"
        self.sheet["D1"].value = "isDone"

    def write_excel(self, bug_id, text, executor, isDone):
        # append one bug per row below the header
        self.sheet["A%d" % (self.aline + 1)].value = "ak-" + str(bug_id)
        self.sheet["B%d" % (self.bline + 1)].value = text
        self.sheet["C%d" % (self.cline + 1)].value = executor
        self.sheet["D%d" % (self.dline + 1)].value = isDone
        self.aline += 1
        self.bline += 1
        self.cline += 1
        self.dline += 1


if __name__ == '__main__':
    tb = TB()
    excel = Excel()
    tb.get_sheel_list()
    tb.get_bug_project()
    tb.get_all_bug(excel.write_excel)
    excel.wb.save('buglist{0}.xlsx'.format(tb.get_current_time))
```