diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..612811b --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +__pycache__/ +*.py[cod] +*$py.class +.idea/ +files/image/ +report/html/ +report/pytest_html/ +report/tmp/ +*.json +logs/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..92ea900 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,68 @@ +stages: + - run + - report-webhook + - report-email + +.python-base: + image: registry.davincimotor.com/davinci-rnd/testing/davinci_dm_api:v2 + script: + - python run.py + after_script: + - echo "GL_JOB_ID=$CI_JOB_ID" >> variables.env + artifacts: + paths: + - report/pytest_html/result.html + - report/html/ + expire_in: 30 days + reports: + dotenv: variables.env + +run-testing: + stage: run + environment: testing + extends: .python-base + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" || $CI_PIPELINE_SOURCE == "trigger" + when: never + - if: $CI_COMMIT_BRANCH == "testing" + +run-master: + stage: run + environment: prod + extends: .python-base + rules: + - if: $CI_PIPELINE_SOURCE == "trigger" && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + when: delayed + start_in: 5 minutes + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + +report-webhook-master: + stage: report-webhook + environment: prod + image: !reference [.python-base, image] + rules: + - if: ($CI_PIPELINE_SOURCE == "schedule" || $CI_PIPELINE_SOURCE == "trigger") && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + script: + - python tools/feishu_control.py + +report-email-testing: + stage: report-email + environment: testing + image: !reference [.python-base, image] + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" || $CI_PIPELINE_SOURCE == "trigger" + when: never + - if: $CI_COMMIT_BRANCH == "testing" + script: + - python tools/officemail_control.py + +report-email-master: + stage: report-email + environment: prod + image: !reference [.python-base, image] + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" || $CI_PIPELINE_SOURCE == "trigger" + when: never + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH + script: + - python tools/officemail_control.py \ No newline at end of file diff --git a/README.md b/README.md index d9c9141..3898d1a 100644 --- a/README.md +++ b/README.md @@ -1,39 +1,40 @@ -# davinci_dm_api +本框架主要是基于 Python + pytest + allure + log + yaml +飞书通知 + Jenkins 实现的接口自动化框架。 -#### 介绍 -{**以下是 Gitee 平台说明,您可以替换此简介** -Gitee 是 OSCHINA 推出的基于 Git 的代码托管平台(同时支持 SVN)。专为开发者提供稳定、高效、安全的云端软件开发协作平台 -无论是个人、团队、或是企业,都能够用 Gitee 实现代码托管、项目管理、协作开发。企业项目请看 [https://gitee.com/enterprises](https://gitee.com/enterprises)} +一、实现功能 测试数据隔离, 实现数据驱动 。重要敏感的参数数据存储在系统变量里(如:域名,账号等), 可根据自动化巡检的入参来获取系统变量里面的参数数据去运行用例,yaml管理接口数据,可选择是否跳过用例,然后运行testcaseAutomaticControl.py 自动将yaml接口数据生成接口用例,运行用例后将结果自动发送飞书通知和邮箱通知。定制日志模块开关: 可选择是否打印每个接口的日志信息。 -#### 软件架构 -软件架构说明 - - -#### 安装教程 - -1. xxxx -2. xxxx -3. xxxx - -#### 使用说明 - -1. xxxx -2. xxxx -3. xxxx - -#### 参与贡献 - -1. Fork 本仓库 -2. 新建 Feat_xxx 分支 -3. 提交代码 -4. 新建 Pull Request - - -#### 特技 - -1. 使用 Readme\_XXX.md 来支持不同的语言,例如 Readme\_en.md, Readme\_zh.md -2. Gitee 官方博客 [blog.gitee.com](https://blog.gitee.com) -3. 你可以 [https://gitee.com/explore](https://gitee.com/explore) 这个地址来了解 Gitee 上的优秀开源项目 -4. [GVP](https://gitee.com/gvp) 全称是 Gitee 最有价值开源项目,是综合评定出的优秀开源项目 -5. Gitee 官方提供的使用手册 [https://gitee.com/help](https://gitee.com/help) -6. 
Gitee 封面人物是一档用来展示 Gitee 会员风采的栏目 [https://gitee.com/gitee-stars/](https://gitee.com/gitee-stars/)
+├── cache // 缓存数据
+├── config // 配置
+│ ├── conf.yaml // 公共配置
+│ ├── settings.py // 文件目录存放区域
+│ ├── configs.py // 根据入参获取系统变量
+├── data // 测试用例数据
+├── images // 测试用例中的多媒体数据(如:图片)
+├── lib // 对象层,用作于接口的调用
+├── logs // 日志层
+├── report // 测试报告层
+├── test_case // 测试用例代码
+├── tools // 所有公共模块的封装
+│ └── aes_cryter.py // 接口参数加解密模块
+│ └── allureControl.py // allure报告数据清洗
+│ └── allureDataControl.py // allure报告数据清洗
+│ └── assertControl.py // 断言模块
+│ └── cacheControl.py // 缓存模块
+│ └── decodeToBase64.py // 响应数据解密模块
+│ └── dingtalkControl.py // 钉钉发送通知
+│ └── excelControl.py // 读取excel文件
+│ └── gettimeControl.py // 时间模块
+│ └── logControl.py // 日志模块
+│ └── logDecorator.py // 日志装饰器
+│ └── mysqlControl.py // 数据库模块
+│ └── regularControl.py // 正则模块
+│ └── requestControl.py // 请求模块
+│ └── runtimeControl.py // 响应时长统计模块
+│ └── sendmailControl.py // 发送邮件
+│ └── signdata.py // 接口鉴权模块
+│ └── testcaseAutomaticControl.py // 自动生成测试代码
+│ └── weChatSendControl.py // 发送微信
+│ └── yamlControl.py // yaml文件
+├── README.md // help
+├── pytest.ini
+├── run.py // 运行入口
\ No newline at end of file
diff --git a/cache/vin b/cache/vin
new file mode 100644
index 0000000..3707423
--- /dev/null
+++ b/cache/vin
@@ -0,0 +1 @@
+73401134000000000
\ No newline at end of file
diff --git a/config/__init__.py b/config/__init__.py
new file mode 100644
index 0000000..9e5b238
--- /dev/null
+++ b/config/__init__.py
@@ -0,0 +1,6 @@
+
+def aa(a):
+    if a > 5:
+        return "ddd"
+
+    raise ValueError("response_data、sql_data、assert_data的数据类型必须要是字典类型")
diff --git a/config/conf.yaml b/config/conf.yaml
new file mode 100644
index 0000000..d2f860b
--- /dev/null
+++ b/config/conf.yaml
@@ -0,0 +1,19 @@
+ProjectName: DM系统国内外接口自动化
+# 测试人员名称,作用于自动生成代码的作者,以及发送企业微信、钉钉通知的测试负责人
+TestName: 郭林莉
+
+# 报告通知类型:0: 不发送通知 1、邮箱通知 2、飞书通知
+NotificationType: 2
+
+# 数据库相关配置
+MySqlDB:
+  # 数据库开关
+  switch: False
+  host:
+  user:
+  password:
+  db:
+
+
+
+real_time_update_test_cases: False
diff --git a/config/configs.py b/config/configs.py
new file mode 100644
index 0000000..3880787
--- /dev/null
+++ b/config/configs.py
@@ -0,0 +1,39 @@
+import os
+
+
+class Config:
+
+    def __getattr__(self, attr):
+        return os.environ[attr]
+
+
+    def get_host11(self):
+        # 判断环境:testing, staging, prod, testing-global, staging-global, prod-global
+        host = ''
+        try:
+            DAV_ENVIRONMENT_SLUG = self.DAV_ENVIRONMENT_SLUG
+            print("传入的DAV_ENVIRONMENT_SLUG是%s" % DAV_ENVIRONMENT_SLUG)
+            if DAV_ENVIRONMENT_SLUG == 'testing':
+                host = self.dm_testing
+            elif DAV_ENVIRONMENT_SLUG == 'staging':
+                host = self.dm_staging
+            elif DAV_ENVIRONMENT_SLUG == 'prod':
+                host = self.dm_prod
+            elif DAV_ENVIRONMENT_SLUG == 'testing-global':
+                host = self.testing_global
+            elif DAV_ENVIRONMENT_SLUG == 'staging-global':
+                host = self.staging_global
+            elif DAV_ENVIRONMENT_SLUG == 'prod-global':
+                host = self.prod_global
+            else:
+                host = ''
+        except KeyError:
+            testing_env = self.CI_ENVIRONMENT_SLUG
+            host = getattr(self, "dm_%s" % testing_env)
+        return host
+
+
+if __name__ == '__main__':
+    # a = Config().__getattr__('dm_testing')
+    a = Config().get_host11()
+    print(a)
diff --git a/config/settings.py b/config/settings.py
new file mode 100644
index 0000000..249c2d9
--- /dev/null
+++ b/config/settings.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import platform + +class ConfigHandler: + root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + # 测试数据路径 + data_path = os.path.join(root_path, 'data' ) + merchant_data_path = os.path.join(root_path, 'data') + + cache_path = os.path.join(root_path, 'cache' ) + + if not os.path.exists(cache_path): + os.mkdir(cache_path) + + case_path = os.path.join(root_path, 'test_case' ) + + file_path = os.path.join(root_path, 'files' ) + + + # 测试报告路径 + report_path = os.path.join(root_path, 'report') + + log_path = os.path.join(root_path , 'logs') + + info_log_path = os.path.join(root_path, 'logs' ,'info.log') + + error_log_path = os.path.join(root_path, 'logs' , 'error.log') + + warning_log_path = os.path.join(root_path, 'logs' ,'warning.log') + + if not os.path.exists(report_path): + os.mkdir(report_path) + + config_path = os.path.join(root_path, 'config' , 'conf.yaml') + + excel_path = os.path.join(root_path, 'data' ) + + # lib 存放po文件 + lib_path = os.path.join(root_path, "lib" ) + + temp_path = os.path.join(root_path, 'report' , 'tmp') + if not os.path.exists(temp_path): + os.mkdir(temp_path) + + html_path = os.path.join(root_path, 'report' ,'html') + + +if __name__ == '__main__': + print(ConfigHandler.data_path) \ No newline at end of file diff --git a/data/dm/fota/api_create_task.yaml b/data/dm/fota/api_create_task.yaml new file mode 100644 index 0000000..208a4b5 --- /dev/null +++ b/data/dm/fota/api_create_task.yaml @@ -0,0 +1,45 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 + allureStory: 新建任务 + markers: domestic + +create_task_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/create/task/list + method: POST + detail: 新建任务 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: { + "vins": [ "73401134000000000" ], + "cycle": 1, + "taskList": [ { + "partsId": "TBOX", + "partName": " DVC7E3000003-0001", + "mainSoftwareVersion": "S0000027", + "secondSoftwareVersion": "S1000028" + } ] +} +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: == + value: "success" + AssertType: + # 断言接口返回的username + sql: + teardown: + diff --git a/data/dm/fota/api_swich.yaml b/data/dm/fota/api_swich.yaml new file mode 100644 index 0000000..fefdf5c --- /dev/null +++ b/data/dm/fota/api_swich.yaml @@ -0,0 +1,36 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 + allureStory: 切换任务队列执行状态 + markers: domestic + +switch_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/turn/switch/73401134000000000 + method: POST + detail: 切换任务队列执行状态 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: list_in + value: [200,500] + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: list_in + value: ["ok","执行状态切换成功,但启动OTA任务失败:原因:已存在升级任务,请关闭后重试", "执行状态切换成功,但启动OTA任务失败:原因:[OTA服务器异常-创建任务] OTA信息:[没有满足升级条件的车辆][-1]"] + AssertType: + sql: + teardown: + + diff --git a/data/dm/fota/aut_wakeup.yaml b/data/dm/fota/aut_wakeup.yaml new file mode 100644 index 0000000..01a093b --- /dev/null +++ b/data/dm/fota/aut_wakeup.yaml @@ -0,0 +1,69 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 
+ allureStory: 一键唤醒 + markers: domestic + +aut_wakeup_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/auto-wakeup + method: PUt + detail: 一键唤醒关闭 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: + autoWakeUp: false + vin: $cache{vin} +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: == + value: "ok" + AssertType: + sql: + teardown: + +aut_wakeup_02: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/auto-wakeup + method: PUt + detail: 一键唤醒开启 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: + autoWakeUp: true + vin: $cache{vin} +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: == + value: "ok" + AssertType: + sql: + teardown: + + diff --git a/data/dmglobal/dmglobalfota/global_api_create_task.yaml b/data/dmglobal/dmglobalfota/global_api_create_task.yaml new file mode 100644 index 0000000..9075568 --- /dev/null +++ b/data/dmglobal/dmglobalfota/global_api_create_task.yaml @@ -0,0 +1,45 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 + allureStory: 新建任务 + markers: global + +global_api_create_task_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/create/task/list + method: POST + detail: 新建任务 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: { + "vins": ["TESTGWR0673250001"], + "cycle": 1, + "taskList": [ { + "partsId": "TBOX", + "partName": "DVC7E3000003-0001", + "mainSoftwareVersion": "SA100002", + "secondSoftwareVersion": "SE100002" + } ] +} +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: == + value: "success" + AssertType: + # 断言接口返回的username + sql: + teardown: + diff --git a/data/dmglobal/dmglobalfota/global_api_swich.yaml b/data/dmglobal/dmglobalfota/global_api_swich.yaml new file mode 100644 index 0000000..8e8d8f2 --- /dev/null +++ b/data/dmglobal/dmglobalfota/global_api_swich.yaml @@ -0,0 +1,35 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 + allureStory: 切换任务队列执行状态 + markers: global + +global_api_swich_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/turn/switch/TESTGWR0673250001 + method: POST + detail: 切换任务队列执行状态--开始任务 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: list_in + value: [ 200,500 ] + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: list_in + value: ["ok",'执行状态切换成功,但启动OTA任务失败:原因:OTA平台未查询到车辆信息; OTA信息:[请求成功][0]','执行状态切换成功,但启动OTA任务失败:原因:[OTA服务器异常-创建任务] OTA信息:[未查询到车辆零部件版本信息][-1]'] + AssertType: + sql: + teardown: + diff --git a/data/dmglobal/dmglobalfota/global_aut_wakeup.yaml b/data/dmglobal/dmglobalfota/global_aut_wakeup.yaml new file mode 100644 index 
0000000..7da9c6c --- /dev/null +++ b/data/dmglobal/dmglobalfota/global_aut_wakeup.yaml @@ -0,0 +1,69 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 + allureStory: 一键唤醒 + markers: global + +global_aut_wakeup_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/auto-wakeup + method: PUt + detail: 一键唤醒关闭 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: + autoWakeUp: false + vin: $cache{vin_global} +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: == + value: "ok" + AssertType: + sql: + teardown: + +global_aut_wakeup_02: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/auto-wakeup + method: PUt + detail: 一键唤醒开启 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: JSON + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: + autoWakeUp: true + vin: $cache{vin_global} +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + message: + jsonpath: $.message + type: == + value: "ok" + AssertType: + sql: + teardown: + + diff --git a/data/dmglobal/dmglobalfota/global_completed_list.yaml b/data/dmglobal/dmglobalfota/global_completed_list.yaml new file mode 100644 index 0000000..49f68ca --- /dev/null +++ b/data/dmglobal/dmglobalfota/global_completed_list.yaml @@ -0,0 +1,48 @@ +case_common: + allureEpic: DM系统 + allureFeature: FOTA系统 + allureStory: 已完成任务列表 + markers: global + +global_completed_list_01: + host: ${{get_host11}} + url: /tsp-server/api/index/auto/ota/completed/list/73494951000000000 + method: GET + detail: 已完成任务列表 + headers: + Content-Type: application/json + Authorization: $cache{login_token} +# Content-Type: $cache{case_process} + requestType: PARAMS + # 是否有依赖业务,为空或者false则表示没有 + is_run: + data: +# 请求类型:params 是以url拼接的形式请求,json则传的是json串 + assert: + status: + jsonpath: $.status + type: == + value: 200 + AssertType: + # 断言接口返回的username + data: + jsonpath: $.message + type: == + value: "ok" + AssertType: + # 断言接口返回的username + sql: + teardown: + - step: + - dependent_type: response + jsonpath: $.data[0].completedTaskId + set_value: $set_cache{completedTaskId} + - step: + - dependent_type: response + jsonpath: $.data[0].otaTaskId + set_value: $set_cache{otaTaskId} + - step: + - dependent_type: response + jsonpath: $.data[0].otaTaskCarId + set_value: $set_cache{otaTaskCarId} + diff --git a/files/BCM_3.3.3_1.2.2_4.5.zip b/files/BCM_3.3.3_1.2.2_4.5.zip new file mode 100644 index 0000000..d5deb1c Binary files /dev/null and b/files/BCM_3.3.3_1.2.2_4.5.zip differ diff --git a/files/BMS v3.3.dbc b/files/BMS v3.3.dbc new file mode 100644 index 0000000..8802e70 --- /dev/null +++ b/files/BMS v3.3.dbc @@ -0,0 +1,401 @@ +VERSION "" + + +NS_ : + NS_DESC_ + CM_ + BA_DEF_ + BA_ + VAL_ + CAT_DEF_ + CAT_ + FILTER + BA_DEF_DEF_ + EV_DATA_ + ENVVAR_DATA_ + SGTYPE_ + SGTYPE_VAL_ + BA_DEF_SGTYPE_ + BA_SGTYPE_ + SIG_TYPE_REF_ + VAL_TABLE_ + SIG_GROUP_ + SIG_VALTYPE_ + SIGTYPE_VALTYPE_ + BO_TX_BU_ + BA_DEF_REL_ + BA_REL_ + BA_DEF_DEF_REL_ + BU_SG_REL_ + BU_EV_REL_ + BU_BO_REL_ + SG_MUL_VAL_ + +BS_: + +BU_: BMS +VAL_TABLE_ BMS_BatRunSts 2 "ѹ" 1 "ŵѹ" 0 "δ" ; +VAL_TABLE_ BMS_BatFstChgSts 3 "" 2 "УǹУ" 1 "ɳ磨Ѳǹδ磩" 0 "ɳ磨δǹ" ; +VAL_TABLE_ BMS_BatSlwChgSts 2 "" 1 "ɳ" 0 "ɳ" 
; +VAL_TABLE_ BMS_WakeUpSrc 1 "CAN" 0 "Կ׻" ; +VAL_TABLE_ BMS_BatRlyStsFstChgNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsFstChgPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatErrLvl 4 "4ϣжϣ" 3 "3ϣ" 2 "2ϣʣ" 1 "1ϣ" 0 "" ; +VAL_TABLE_ BMS_BatHVLkStsChrg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatHVLkStsMain 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsSlwChg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsPre 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatLockSts 1 "" 0 "" ; +VAL_TABLE_ BMS_ErrTempSens 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrInitFail 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrFCSocketT 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrPoleTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrLowVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrLowVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSOCDevLrg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrBMS 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrHVLk 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrIR 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrPreChg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrInnerRly 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSOCLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrFeedBack 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrDischgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrChgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCeTmpDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelVolDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSysVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSysVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelTmpLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_Cmd_ECU_Req 164 "BMS" ; +VAL_TABLE_ BMS_Cmd_Func_Param 1 "" 0 "ر" ; +VAL_TABLE_ BMS_PostRes 255 "Update Failed since last reboot" 2 "Update Failed since last reboot" 1 "Update Success since last reboot" 0 "No Error" ; +VAL_TABLE_ BMS_FwVer 255 "Software version" 46 "Software revision" 1 "Software revision" 0 "Software version" ; +VAL_TABLE_ BMS_HwVer 46 "Hardware revision" 2 "Hardware version" 1 "Hardware revision" ; +VAL_TABLE_ BMS_Cmd_Return_Res 255 "Error Code" 0 "Error" ; +VAL_TABLE_ BMS_Cmd_Func_Req 35 "" 34 "" 5 "Ԥ" 4 "ѹϵź" 2 "SOC" 0 "" ; +VAL_TABLE_ BMS_BatRunSts 2 "ѹ" 1 "ŵѹ" 0 "δ" ; +VAL_TABLE_ BMS_BatFstChgSts 3 "" 2 "(ǹ)" 1 "ɳ(Ѳǹδ)" 0 "ɳ(δǹ)" ; +VAL_TABLE_ BMS_BatSlwChgSts 2 "" 1 "ɳ" 0 "ɳ" ; +VAL_TABLE_ BMS_WakeUpSrc 1 "CAN" 0 "Կ׻" ; +VAL_TABLE_ BMS_BatRlyStsFstChgNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsFstChgPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatErrLvl 4 "4(ж)" 3 "3()" 2 "2()" 1 "1()" 0 "" ; +VAL_TABLE_ BMS_BatHVLkStsChrg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatHVLkStsMain 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsSlwChg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlystsPre 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatlockSts 1 "" 0 "" ; + + +BO_ 3221225472 VECTOR__INDEPENDENT_SIG_MSG: 0 Vector__XXX + SG_ BMS_Cmd_Return : 0|8@1+ (1,0) [0|255] 
"" Vector__XXX + SG_ BMS_Cmd_Func_Param : 0|48@1+ (1,0) [-140737488355328|140737488355327] "" Vector__XXX + SG_ BMS_BatDisChgMaxPow : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_BatDisChgMaxCur : 0|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_Bat10sDisChgMaxPo : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_Bat10sDisChgMaxCur : 0|16@1+ (1,0) [0|300] "A" Vector__XXX + +BO_ 2565799844 BMS_Info2: 8 BMS + SG_ BMS_BatNum : 0|64@1+ (1,0) [0|1.84467440737096E+019] "" Vector__XXX + +BO_ 2550202276 BMS_Info1: 8 BMS + SG_ BMS_PostRes : 56|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_FwVer : 24|32@1+ (1,0) [-2147483648|2147483647] "" Vector__XXX + SG_ BMS_HwVer : 0|24@1+ (1,0) [0|16777215] "" Vector__XXX + +BO_ 2559115172 BMS_Cmd_Res: 3 BMS + SG_ BMS_Cmd_Func : 8|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_Cmd_ECU : 0|8@1+ (1,0) [0|255] "" Vector__XXX + +BO_ 2559091967 BMS_Cmd_Req: 8 BMS + SG_ BMS_Cmd_Func : 8|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_Cmd_ECU : 0|8@1+ (1,0) [0|255] "" Vector__XXX + +BO_ 2551447460 BMS_Err3: 1 BMS + SG_ ChgStopErr8 : 7|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr7 : 6|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr6 : 5|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr5 : 4|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr4 : 3|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr3 : 2|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr2 : 1|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr1 : 0|1@1+ (1,0) [0|1] "" Vector__XXX + +BO_ 2551381924 BMS_Err2: 8 BMS + SG_ BMS_ErrTempSens : 48|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrInitFail : 44|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrFCSocketT : 40|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrPoleTmpHi : 36|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrLowVolLo : 28|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrLowVolHi : 24|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSOCDevLrg : 8|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrBMS : 4|4@1+ (1,0) [0|15] "" Vector__XXX + +BO_ 2551316388 BMS_Err1: 8 BMS + SG_ BMS_ErrHVLk : 60|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrInnerRly : 48|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrIR : 56|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrPreChg : 52|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSOCLo : 44|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrFeedBack : 40|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrDischgCurHi : 36|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrChgCurHi : 32|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelTmpDiffHi : 28|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelVolDiffHi : 24|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSysVolLo : 20|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSysVolHi : 16|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelTmpLo : 12|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelTmpHi : 8|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelVolLo : 4|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelVolHi : 0|4@1+ (1,0) [0|15] "" Vector__XXX + +BO_ 2550923172 BMS_Sts11: 3 BMS + SG_ BMS_EmgcyChgFlg : 16|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_CurMaxChgSOC : 0|16@1+ (0.1,0) [0|100] "%" Vector__XXX + +BO_ 2550857636 BMS_Sts10: 8 BMS + SG_ BMS_RemainChgTime : 40|16@1+ (1,0) [0|65535] "min" Vector__XXX + SG_ BMS_ElapsedChgTime : 24|16@1+ (1,0) [0|65535] "min" Vector__XXX + SG_ BMS_SupplyVolt : 16|8@1+ (0.1,0) [0|30] "V" Vector__XXX + SG_ BMS_BatSOE : 0|16@1+ (0.1,0) [0|100] "%" Vector__XXX + +BO_ 2550792100 BMS_Sts9: 8 BMS + SG_ BMS_FstChgVolRqst : 16|16@1+ (1,0) [0|65535] "V" Vector__XXX + SG_ 
BMS_FstChgCurRqst : 0|16@1+ (1,0) [0|100] "A" Vector__XXX + SG_ BMS_CC2 : 48|16@1+ (1,0) [0|1] "mv" Vector__XXX + SG_ BMS_CC : 32|16@1+ (1,0) [0|1] "ohm" Vector__XXX + +BO_ 2550726564 BMS_Sts8: 7 BMS + SG_ BMS_FstChgCapA : 16|8@1+ (1,0) [0|255] "A" Vector__XXX + SG_ BMS_CurChgPow : 24|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_CurChgCur : 40|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_ChgPlugSts : 8|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_ChgPlugS3Sts : 9|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_OBCChgCapA : 0|8@1+ (1,0) [0|63] "A" Vector__XXX + +BO_ 2550661028 BMS_Sts7: 8 BMS + SG_ BMS_OBCChgVolRqst : 16|16@1+ (0.1,0) [0|1100] "V" Vector__XXX + SG_ BMS_OBCChgCurRqst : 0|16@1+ (0.1,0) [0|300] "A" Vector__XXX + SG_ BMS_CPSts : 48|8@1+ (1,0) [0|255] "%" Vector__XXX + SG_ BMS_CCSts : 56|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_CC2Sts : 57|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatChgVolCpl : 32|16@1+ (1,0) [0|65535] "V" Vector__XXX + +BO_ 2550595492 BMS_Sts6: 6 BMS + SG_ BMS_FCSocketMaxT : 40|8@1+ (1,-40) [-40|210] "" Vector__XXX + SG_ BMS_BatMinCelTNum : 24|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMinCelT : 16|8@1+ (1,-40) [-40|210] "" Vector__XXX + SG_ BMS_BatMaxCelTNum : 8|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMaxCelT : 0|8@1+ (1,-40) [-40|210] "" Vector__XXX + SG_ BMS_BatAvgCelT : 32|8@1+ (1,-40) [-40|210] "" Vector__XXX + +BO_ 2550529956 BMS_Sts5: 8 BMS + SG_ BMS_BatAvgCelVol : 48|16@1+ (0.001,0) [0|5] "V" Vector__XXX + SG_ BMS_BatMinCelVNum : 40|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMinCelVol : 24|16@1+ (0.001,0) [0|5] "V" Vector__XXX + SG_ BMS_BatMaxCelVNum : 16|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMaxCelVol : 0|16@1+ (0.001,0) [0|5] "V" Vector__XXX + +BO_ 2550464420 BMS_Sts4: 8 BMS + SG_ BMS_BatChgMaxPow : 16|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_BatChgMaxCur : 48|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_Bat10sChgMaxPow : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_Bat10sChgMaxCur : 32|16@1+ (1,0) [0|300] "A" Vector__XXX + +BO_ 2550398884 BMS_Sts3: 8 BMS + SG_ BMS_BatDisChgMaxPow : 16|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_BatDisChgMaxCur : 48|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_Bat10sDisChgMaxPo : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_Bat10sDisChgMaxCur : 32|16@1+ (1,0) [0|300] "A" Vector__XXX + +BO_ 2550333348 BMS_Sts2: 8 BMS + SG_ BMS_BatOuterVol : 0|16@1+ (0.1,0) [0|1100] "V" Vector__XXX + SG_ BMS_BatInnerVol : 16|16@1+ (0.1,0) [0|1100] "V" Vector__XXX + SG_ BMS_BatTalCur : 32|16@1+ (0.1,-300) [-300|300] "A" Vector__XXX + SG_ BMS_BatIslatRes : 48|16@1+ (1,0) [0|60000] "kOhm" Vector__XXX + +BO_ 2550267812 BMS_Sts1: 8 BMS + SG_ BMS_WakeUpSrc : 50|2@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatSOH : 32|16@1+ (0.1,0) [0|100] "%" Vector__XXX + SG_ BMS_BatSOC : 16|16@1+ (0.1,0) [0|100] "%" Vector__XXX + SG_ BMS_BatSlwChgSts : 56|2@1+ (1,0) [0|2] "" Vector__XXX + SG_ BMS_BatRunSts : 60|4@1+ (1,0) [0|2] "" Vector__XXX + SG_ BMS_BatRlyStsSlwChg : 4|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsPre : 1|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsPos : 2|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsNeg : 3|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsFstChgPos : 48|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsFstChgNeg : 49|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatLockSts : 0|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatHVLkStsMain : 5|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatHVLkStsChrg : 6|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatFstChgSts : 
58|2@1+ (1,0) [0|2] "" Vector__XXX + SG_ BMS_BatErrLvl : 8|8@1+ (1,0) [-128|127] "" Vector__XXX + + + +CM_ BO_ 3221225472 "This is a message for not used signals, created by Vector CANdb++ DBC OLE DB Provider."; +CM_ SG_ 3221225472 BMS_Cmd_Return "ƹӦ"; +CM_ SG_ 3221225472 BMS_Cmd_Func_Param "ƹܲ"; +CM_ SG_ 3221225472 BMS_BatDisChgMaxPow "ϵͳŵ繦"; +CM_ SG_ 3221225472 BMS_BatDisChgMaxCur "ϵͳŵ"; +CM_ SG_ 3221225472 BMS_Bat10sDisChgMaxPo "ϵͳ10sŵ繦 "; +CM_ SG_ 3221225472 BMS_Bat10sDisChgMaxCur "ϵͳ10sŵ"; +CM_ SG_ 2565799844 BMS_BatNum "رŸݡGBT 34014-2017 +壬24ַ4֡ͣЭ +Э鶨塱"; +CM_ SG_ 2550202276 BMS_PostRes "Լ"; +CM_ SG_ 2550202276 BMS_FwVer "̼汾 "; +CM_ SG_ 2550202276 BMS_HwVer "Ӳ汾"; +CM_ SG_ 2559115172 BMS_Cmd_Func "ƹ"; +CM_ SG_ 2559115172 BMS_Cmd_ECU "ƶ"; +CM_ SG_ 2559091967 BMS_Cmd_Func "ƹ"; +CM_ SG_ 2559091967 BMS_Cmd_ECU "ƶ"; +CM_ SG_ 2551447460 ChgStopErr8 "ճ״̬ijʱֹ"; +CM_ SG_ 2551447460 ChgStopErr7 "ֹͣ"; +CM_ SG_ 2551447460 ChgStopErr6 "BMSֹܴ"; +CM_ SG_ 2551447460 ChgStopErr5 "ڲֹ"; +CM_ SG_ 2551447460 ChgStopErr4 "ֹ"; +CM_ SG_ 2551447460 ChgStopErr3 "ֹ"; +CM_ SG_ 2551447460 ChgStopErr2 "˹ֹͣ"; +CM_ SG_ 2551447460 ChgStopErr1 "عֹͣ"; +CM_ SG_ 2551381924 BMS_ErrTempSens "¶ȴ "; +CM_ SG_ 2551381924 BMS_ErrInitFail "BMSʼɹ"; +CM_ SG_ 2551381924 BMS_ErrFCSocketT "¶ȹ"; +CM_ SG_ 2551381924 BMS_ErrPoleTmpHi "¶ȹ"; +CM_ SG_ 2551381924 BMS_ErrLowVolLo "ѹ"; +CM_ SG_ 2551381924 BMS_ErrLowVolHi "ѹ"; +CM_ SG_ 2551381924 BMS_ErrSOCDevLrg "SOCƫ"; +CM_ SG_ 2551381924 BMS_ErrBMS "BMS"; +CM_ SG_ 2551316388 BMS_ErrHVLk "ѹ"; +CM_ SG_ 2551316388 BMS_ErrInnerRly "̵"; +CM_ SG_ 2551316388 BMS_ErrIR "Ե "; +CM_ SG_ 2551316388 BMS_ErrPreChg "Ԥ"; +CM_ SG_ 2551316388 BMS_ErrSOCLo "SOC"; +CM_ SG_ 2551316388 BMS_ErrFeedBack ""; +CM_ SG_ 2551316388 BMS_ErrDischgCurHi "ŵ"; +CM_ SG_ 2551316388 BMS_ErrChgCurHi " "; +CM_ SG_ 2551316388 BMS_ErrCelTmpDiffHi "²"; +CM_ SG_ 2551316388 BMS_ErrCelVolDiffHi "ѹ"; +CM_ SG_ 2551316388 BMS_ErrSysVolLo "ܵѹ"; +CM_ SG_ 2551316388 BMS_ErrSysVolHi "ܵѹ"; +CM_ SG_ 2551316388 BMS_ErrCelTmpLo "¶ȹ "; +CM_ SG_ 2551316388 BMS_ErrCelTmpHi "¶ȹ"; +CM_ SG_ 2551316388 BMS_ErrCelVolLo "ѹ "; +CM_ SG_ 2551316388 BMS_ErrCelVolHi "ѹ"; +CM_ SG_ 2550923172 BMS_EmgcyChgFlg "Ӧ־"; +CM_ SG_ 2550923172 BMS_CurMaxChgSOC "ǰijSOC"; +CM_ SG_ 2550857636 BMS_RemainChgTime "Ԥʣʱ "; +CM_ SG_ 2550857636 BMS_ElapsedChgTime "Ѿʱ"; +CM_ SG_ 2550857636 BMS_SupplyVolt "пصصѹ"; +CM_ SG_ 2550857636 BMS_BatSOE "ϵͳSOE"; +CM_ SG_ 2550792100 BMS_FstChgVolRqst "BMSѹ"; +CM_ SG_ 2550792100 BMS_FstChgCurRqst "BMS"; +CM_ SG_ 2550792100 BMS_CC2 "BMSCC2״̬䣩"; +CM_ SG_ 2550792100 BMS_CC "BMSCC״̬䣩"; +CM_ SG_ 2550726564 BMS_FstChgCapA "BMSװó䣩"; +CM_ SG_ 2550726564 BMS_CurChgPow "ǰܻչ"; +CM_ SG_ 2550726564 BMS_CurChgCur "ǰܻյ"; +CM_ SG_ 2550726564 BMS_ChgPlugSts "BMSӲ״̬䣩"; +CM_ SG_ 2550726564 BMS_ChgPlugS3Sts "BMSCC_S3״̬䣩 "; +CM_ SG_ 2550726564 BMS_OBCChgCapA "BMSװó䣩"; +CM_ SG_ 2550661028 BMS_OBCChgVolRqst "BMSOBCѹ "; +CM_ SG_ 2550661028 BMS_OBCChgCurRqst "BMSOBC"; +CM_ SG_ 2550661028 BMS_CPSts "BMSCP״̬䣩"; +CM_ SG_ 2550661028 BMS_CCSts "BMSCC״̬䣩"; +CM_ SG_ 2550661028 BMS_CC2Sts "BMSCC2״̬䣩 "; +CM_ SG_ 2550661028 BMS_BatChgVolCpl "ϵͳɵѹ"; +CM_ SG_ 2550595492 BMS_FCSocketMaxT "¶ȵ¶"; +CM_ SG_ 2550595492 BMS_BatMinCelTNum "¶ȵغ"; +CM_ SG_ 2550595492 BMS_BatMinCelT "¶ȵ¶ "; +CM_ SG_ 2550595492 BMS_BatMaxCelTNum "¶ȵغ"; +CM_ SG_ 2550595492 BMS_BatMaxCelT "¶ȵ¶"; +CM_ SG_ 2550595492 BMS_BatAvgCelT "ƽ¶"; +CM_ SG_ 2550529956 BMS_BatAvgCelVol "ƽѹ"; +CM_ SG_ 2550529956 BMS_BatMinCelVNum "͵غ"; +CM_ SG_ 2550529956 BMS_BatMinCelVol "͵ѹ"; +CM_ SG_ 2550529956 BMS_BatMaxCelVNum "ߵغ"; +CM_ 
SG_ 2550529956 BMS_BatMaxCelVol "ߵѹ"; +CM_ SG_ 2550464420 BMS_BatChgMaxPow "ϵͳ繦"; +CM_ SG_ 2550464420 BMS_BatChgMaxCur "ϵͳ"; +CM_ SG_ 2550464420 BMS_Bat10sChgMaxPow "ϵͳ10s繦"; +CM_ SG_ 2550464420 BMS_Bat10sChgMaxCur "ϵͳ10s"; +CM_ SG_ 2550398884 BMS_BatDisChgMaxPow "ϵͳŵ繦"; +CM_ SG_ 2550398884 BMS_BatDisChgMaxCur "ϵͳŵ"; +CM_ SG_ 2550398884 BMS_Bat10sDisChgMaxPo "ϵͳ10sŵ繦 "; +CM_ SG_ 2550398884 BMS_Bat10sDisChgMaxCur "ϵͳ10sŵ"; +CM_ SG_ 2550333348 BMS_BatOuterVol "ϵͳܵѹ"; +CM_ SG_ 2550333348 BMS_BatInnerVol "ϵͳڲܵѹ"; +CM_ SG_ 2550333348 BMS_BatTalCur "ϵͳܵ"; +CM_ SG_ 2550333348 BMS_BatIslatRes "ϵͳԵֵ"; +CM_ SG_ 2550267812 BMS_WakeUpSrc "BMSԴ"; +CM_ SG_ 2550267812 BMS_BatSOH "ϵͳSOH"; +CM_ SG_ 2550267812 BMS_BatSOC "ϵͳSOC"; +CM_ SG_ 2550267812 BMS_BatSlwChgSts "ϵͳ״̬"; +CM_ SG_ 2550267812 BMS_BatRunSts "ϵͳ״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsSlwChg "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsPre "ϵͳԤ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsPos "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsNeg "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsFstChgPos "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsFstChgNeg "ϵͳ为̵״̬"; +CM_ SG_ 2550267812 BMS_BatLockSts "ؽ״̬"; +CM_ SG_ 2550267812 BMS_BatHVLkStsMain "ѹ״̬"; +CM_ SG_ 2550267812 BMS_BatHVLkStsChrg "ѹ״̬"; +CM_ SG_ 2550267812 BMS_BatFstChgSts "ϵͳ״̬"; +CM_ SG_ 2550267812 BMS_BatErrLvl "ϵͳϵȼ"; +BA_DEF_ "BusType" STRING ; +BA_DEF_DEF_ "BusType" "CAN"; +VAL_ 3221225472 BMS_Cmd_Return 255 "Error Code" 0 "Error" ; +VAL_ 3221225472 BMS_Cmd_Func_Param 1 "" 0 "ر" ; +VAL_ 2550202276 BMS_PostRes 2 "Update Failed since last reboot" 1 "Update Success since last reboot" 0 "No Error" ; +VAL_ 2559115172 BMS_Cmd_Func 35 "" 34 "" 5 "Ԥ" 4 "ѹϵź" 2 "SOC" 0 "" ; +VAL_ 2559091967 BMS_Cmd_Func 35 "" 34 "" 5 "Ԥ" 4 "ѹϵź" 2 "SOC" 0 "" ; +VAL_ 2559091967 BMS_Cmd_ECU 164 "BMS" ; +VAL_ 2551381924 BMS_ErrTempSens 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrInitFail 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrFCSocketT 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrPoleTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrLowVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrLowVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrSOCDevLrg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrBMS 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrHVLk 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrInnerRly 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrIR 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrPreChg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrSOCLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrFeedBack 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrDischgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrChgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelTmpDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelVolDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrSysVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrSysVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelTmpLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 
BMS_ErrCelVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2550267812 BMS_WakeUpSrc 1 "CAN" 0 "Կ׻" ; +VAL_ 2550267812 BMS_BatSlwChgSts 2 "" 1 "ɳ" 0 "ɳ" ; +VAL_ 2550267812 BMS_BatRunSts 2 "ѹ" 1 "ŵѹ" 0 "δ" ; +VAL_ 2550267812 BMS_BatRlyStsSlwChg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsPre 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsPos 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsNeg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsFstChgPos 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsFstChgNeg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatLockSts 1 "" 0 "" ; +VAL_ 2550267812 BMS_BatHVLkStsMain 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatHVLkStsChrg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatFstChgSts 3 "" 2 "(ǹ)" 1 "ɳ(Ѳǹδ)" 0 "ɳ(δǹ)" ; +VAL_ 2550267812 BMS_BatErrLvl 4 "4(ж)" 3 "3()" 2 "2()" 1 "1()" 0 "" ; + diff --git a/files/BMSv3.3.dbc b/files/BMSv3.3.dbc new file mode 100644 index 0000000..8802e70 --- /dev/null +++ b/files/BMSv3.3.dbc @@ -0,0 +1,401 @@ +VERSION "" + + +NS_ : + NS_DESC_ + CM_ + BA_DEF_ + BA_ + VAL_ + CAT_DEF_ + CAT_ + FILTER + BA_DEF_DEF_ + EV_DATA_ + ENVVAR_DATA_ + SGTYPE_ + SGTYPE_VAL_ + BA_DEF_SGTYPE_ + BA_SGTYPE_ + SIG_TYPE_REF_ + VAL_TABLE_ + SIG_GROUP_ + SIG_VALTYPE_ + SIGTYPE_VALTYPE_ + BO_TX_BU_ + BA_DEF_REL_ + BA_REL_ + BA_DEF_DEF_REL_ + BU_SG_REL_ + BU_EV_REL_ + BU_BO_REL_ + SG_MUL_VAL_ + +BS_: + +BU_: BMS +VAL_TABLE_ BMS_BatRunSts 2 "ѹ" 1 "ŵѹ" 0 "δ" ; +VAL_TABLE_ BMS_BatFstChgSts 3 "" 2 "УǹУ" 1 "ɳ磨Ѳǹδ磩" 0 "ɳ磨δǹ" ; +VAL_TABLE_ BMS_BatSlwChgSts 2 "" 1 "ɳ" 0 "ɳ" ; +VAL_TABLE_ BMS_WakeUpSrc 1 "CAN" 0 "Կ׻" ; +VAL_TABLE_ BMS_BatRlyStsFstChgNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsFstChgPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatErrLvl 4 "4ϣжϣ" 3 "3ϣ" 2 "2ϣʣ" 1 "1ϣ" 0 "" ; +VAL_TABLE_ BMS_BatHVLkStsChrg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatHVLkStsMain 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsSlwChg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsPre 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatLockSts 1 "" 0 "" ; +VAL_TABLE_ BMS_ErrTempSens 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrInitFail 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrFCSocketT 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrPoleTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrLowVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrLowVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSOCDevLrg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrBMS 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrHVLk 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrIR 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrPreChg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrInnerRly 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSOCLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrFeedBack 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrDischgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrChgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCeTmpDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelVolDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSysVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrSysVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelTmpLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 
"ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_ErrCelVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_TABLE_ BMS_Cmd_ECU_Req 164 "BMS" ; +VAL_TABLE_ BMS_Cmd_Func_Param 1 "" 0 "ر" ; +VAL_TABLE_ BMS_PostRes 255 "Update Failed since last reboot" 2 "Update Failed since last reboot" 1 "Update Success since last reboot" 0 "No Error" ; +VAL_TABLE_ BMS_FwVer 255 "Software version" 46 "Software revision" 1 "Software revision" 0 "Software version" ; +VAL_TABLE_ BMS_HwVer 46 "Hardware revision" 2 "Hardware version" 1 "Hardware revision" ; +VAL_TABLE_ BMS_Cmd_Return_Res 255 "Error Code" 0 "Error" ; +VAL_TABLE_ BMS_Cmd_Func_Req 35 "" 34 "" 5 "Ԥ" 4 "ѹϵź" 2 "SOC" 0 "" ; +VAL_TABLE_ BMS_BatRunSts 2 "ѹ" 1 "ŵѹ" 0 "δ" ; +VAL_TABLE_ BMS_BatFstChgSts 3 "" 2 "(ǹ)" 1 "ɳ(Ѳǹδ)" 0 "ɳ(δǹ)" ; +VAL_TABLE_ BMS_BatSlwChgSts 2 "" 1 "ɳ" 0 "ɳ" ; +VAL_TABLE_ BMS_WakeUpSrc 1 "CAN" 0 "Կ׻" ; +VAL_TABLE_ BMS_BatRlyStsFstChgNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsFstChgPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatErrLvl 4 "4(ж)" 3 "3()" 2 "2()" 1 "1()" 0 "" ; +VAL_TABLE_ BMS_BatHVLkStsChrg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatHVLkStsMain 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsSlwChg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsNeg 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlyStsPos 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatRlystsPre 1 "պ" 0 "Ͽ" ; +VAL_TABLE_ BMS_BatlockSts 1 "" 0 "" ; + + +BO_ 3221225472 VECTOR__INDEPENDENT_SIG_MSG: 0 Vector__XXX + SG_ BMS_Cmd_Return : 0|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_Cmd_Func_Param : 0|48@1+ (1,0) [-140737488355328|140737488355327] "" Vector__XXX + SG_ BMS_BatDisChgMaxPow : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_BatDisChgMaxCur : 0|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_Bat10sDisChgMaxPo : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_Bat10sDisChgMaxCur : 0|16@1+ (1,0) [0|300] "A" Vector__XXX + +BO_ 2565799844 BMS_Info2: 8 BMS + SG_ BMS_BatNum : 0|64@1+ (1,0) [0|1.84467440737096E+019] "" Vector__XXX + +BO_ 2550202276 BMS_Info1: 8 BMS + SG_ BMS_PostRes : 56|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_FwVer : 24|32@1+ (1,0) [-2147483648|2147483647] "" Vector__XXX + SG_ BMS_HwVer : 0|24@1+ (1,0) [0|16777215] "" Vector__XXX + +BO_ 2559115172 BMS_Cmd_Res: 3 BMS + SG_ BMS_Cmd_Func : 8|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_Cmd_ECU : 0|8@1+ (1,0) [0|255] "" Vector__XXX + +BO_ 2559091967 BMS_Cmd_Req: 8 BMS + SG_ BMS_Cmd_Func : 8|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_Cmd_ECU : 0|8@1+ (1,0) [0|255] "" Vector__XXX + +BO_ 2551447460 BMS_Err3: 1 BMS + SG_ ChgStopErr8 : 7|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr7 : 6|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr6 : 5|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr5 : 4|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr4 : 3|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr3 : 2|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr2 : 1|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ ChgStopErr1 : 0|1@1+ (1,0) [0|1] "" Vector__XXX + +BO_ 2551381924 BMS_Err2: 8 BMS + SG_ BMS_ErrTempSens : 48|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrInitFail : 44|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrFCSocketT : 40|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrPoleTmpHi : 36|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrLowVolLo : 28|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrLowVolHi : 24|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSOCDevLrg : 8|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrBMS : 4|4@1+ (1,0) [0|15] "" Vector__XXX + +BO_ 2551316388 BMS_Err1: 8 BMS + SG_ BMS_ErrHVLk : 60|4@1+ 
(1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrInnerRly : 48|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrIR : 56|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrPreChg : 52|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSOCLo : 44|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrFeedBack : 40|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrDischgCurHi : 36|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrChgCurHi : 32|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelTmpDiffHi : 28|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelVolDiffHi : 24|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSysVolLo : 20|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrSysVolHi : 16|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelTmpLo : 12|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelTmpHi : 8|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelVolLo : 4|4@1+ (1,0) [0|15] "" Vector__XXX + SG_ BMS_ErrCelVolHi : 0|4@1+ (1,0) [0|15] "" Vector__XXX + +BO_ 2550923172 BMS_Sts11: 3 BMS + SG_ BMS_EmgcyChgFlg : 16|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_CurMaxChgSOC : 0|16@1+ (0.1,0) [0|100] "%" Vector__XXX + +BO_ 2550857636 BMS_Sts10: 8 BMS + SG_ BMS_RemainChgTime : 40|16@1+ (1,0) [0|65535] "min" Vector__XXX + SG_ BMS_ElapsedChgTime : 24|16@1+ (1,0) [0|65535] "min" Vector__XXX + SG_ BMS_SupplyVolt : 16|8@1+ (0.1,0) [0|30] "V" Vector__XXX + SG_ BMS_BatSOE : 0|16@1+ (0.1,0) [0|100] "%" Vector__XXX + +BO_ 2550792100 BMS_Sts9: 8 BMS + SG_ BMS_FstChgVolRqst : 16|16@1+ (1,0) [0|65535] "V" Vector__XXX + SG_ BMS_FstChgCurRqst : 0|16@1+ (1,0) [0|100] "A" Vector__XXX + SG_ BMS_CC2 : 48|16@1+ (1,0) [0|1] "mv" Vector__XXX + SG_ BMS_CC : 32|16@1+ (1,0) [0|1] "ohm" Vector__XXX + +BO_ 2550726564 BMS_Sts8: 7 BMS + SG_ BMS_FstChgCapA : 16|8@1+ (1,0) [0|255] "A" Vector__XXX + SG_ BMS_CurChgPow : 24|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_CurChgCur : 40|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_ChgPlugSts : 8|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_ChgPlugS3Sts : 9|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_OBCChgCapA : 0|8@1+ (1,0) [0|63] "A" Vector__XXX + +BO_ 2550661028 BMS_Sts7: 8 BMS + SG_ BMS_OBCChgVolRqst : 16|16@1+ (0.1,0) [0|1100] "V" Vector__XXX + SG_ BMS_OBCChgCurRqst : 0|16@1+ (0.1,0) [0|300] "A" Vector__XXX + SG_ BMS_CPSts : 48|8@1+ (1,0) [0|255] "%" Vector__XXX + SG_ BMS_CCSts : 56|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_CC2Sts : 57|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatChgVolCpl : 32|16@1+ (1,0) [0|65535] "V" Vector__XXX + +BO_ 2550595492 BMS_Sts6: 6 BMS + SG_ BMS_FCSocketMaxT : 40|8@1+ (1,-40) [-40|210] "" Vector__XXX + SG_ BMS_BatMinCelTNum : 24|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMinCelT : 16|8@1+ (1,-40) [-40|210] "" Vector__XXX + SG_ BMS_BatMaxCelTNum : 8|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMaxCelT : 0|8@1+ (1,-40) [-40|210] "" Vector__XXX + SG_ BMS_BatAvgCelT : 32|8@1+ (1,-40) [-40|210] "" Vector__XXX + +BO_ 2550529956 BMS_Sts5: 8 BMS + SG_ BMS_BatAvgCelVol : 48|16@1+ (0.001,0) [0|5] "V" Vector__XXX + SG_ BMS_BatMinCelVNum : 40|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMinCelVol : 24|16@1+ (0.001,0) [0|5] "V" Vector__XXX + SG_ BMS_BatMaxCelVNum : 16|8@1+ (1,0) [0|255] "" Vector__XXX + SG_ BMS_BatMaxCelVol : 0|16@1+ (0.001,0) [0|5] "V" Vector__XXX + +BO_ 2550464420 BMS_Sts4: 8 BMS + SG_ BMS_BatChgMaxPow : 16|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_BatChgMaxCur : 48|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_Bat10sChgMaxPow : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_Bat10sChgMaxCur : 32|16@1+ (1,0) [0|300] "A" Vector__XXX + +BO_ 2550398884 BMS_Sts3: 8 BMS 
+ SG_ BMS_BatDisChgMaxPow : 16|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_BatDisChgMaxCur : 48|16@1+ (1,0) [0|300] "A" Vector__XXX + SG_ BMS_Bat10sDisChgMaxPo : 0|16@1+ (10,0) [0|300000] "W" Vector__XXX + SG_ BMS_Bat10sDisChgMaxCur : 32|16@1+ (1,0) [0|300] "A" Vector__XXX + +BO_ 2550333348 BMS_Sts2: 8 BMS + SG_ BMS_BatOuterVol : 0|16@1+ (0.1,0) [0|1100] "V" Vector__XXX + SG_ BMS_BatInnerVol : 16|16@1+ (0.1,0) [0|1100] "V" Vector__XXX + SG_ BMS_BatTalCur : 32|16@1+ (0.1,-300) [-300|300] "A" Vector__XXX + SG_ BMS_BatIslatRes : 48|16@1+ (1,0) [0|60000] "kOhm" Vector__XXX + +BO_ 2550267812 BMS_Sts1: 8 BMS + SG_ BMS_WakeUpSrc : 50|2@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatSOH : 32|16@1+ (0.1,0) [0|100] "%" Vector__XXX + SG_ BMS_BatSOC : 16|16@1+ (0.1,0) [0|100] "%" Vector__XXX + SG_ BMS_BatSlwChgSts : 56|2@1+ (1,0) [0|2] "" Vector__XXX + SG_ BMS_BatRunSts : 60|4@1+ (1,0) [0|2] "" Vector__XXX + SG_ BMS_BatRlyStsSlwChg : 4|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsPre : 1|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsPos : 2|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsNeg : 3|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsFstChgPos : 48|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatRlyStsFstChgNeg : 49|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatLockSts : 0|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatHVLkStsMain : 5|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatHVLkStsChrg : 6|1@1+ (1,0) [0|1] "" Vector__XXX + SG_ BMS_BatFstChgSts : 58|2@1+ (1,0) [0|2] "" Vector__XXX + SG_ BMS_BatErrLvl : 8|8@1+ (1,0) [-128|127] "" Vector__XXX + + + +CM_ BO_ 3221225472 "This is a message for not used signals, created by Vector CANdb++ DBC OLE DB Provider."; +CM_ SG_ 3221225472 BMS_Cmd_Return "ƹӦ"; +CM_ SG_ 3221225472 BMS_Cmd_Func_Param "ƹܲ"; +CM_ SG_ 3221225472 BMS_BatDisChgMaxPow "ϵͳŵ繦"; +CM_ SG_ 3221225472 BMS_BatDisChgMaxCur "ϵͳŵ"; +CM_ SG_ 3221225472 BMS_Bat10sDisChgMaxPo "ϵͳ10sŵ繦 "; +CM_ SG_ 3221225472 BMS_Bat10sDisChgMaxCur "ϵͳ10sŵ"; +CM_ SG_ 2565799844 BMS_BatNum "رŸݡGBT 34014-2017 +壬24ַ4֡ͣЭ +Э鶨塱"; +CM_ SG_ 2550202276 BMS_PostRes "Լ"; +CM_ SG_ 2550202276 BMS_FwVer "̼汾 "; +CM_ SG_ 2550202276 BMS_HwVer "Ӳ汾"; +CM_ SG_ 2559115172 BMS_Cmd_Func "ƹ"; +CM_ SG_ 2559115172 BMS_Cmd_ECU "ƶ"; +CM_ SG_ 2559091967 BMS_Cmd_Func "ƹ"; +CM_ SG_ 2559091967 BMS_Cmd_ECU "ƶ"; +CM_ SG_ 2551447460 ChgStopErr8 "ճ״̬ijʱֹ"; +CM_ SG_ 2551447460 ChgStopErr7 "ֹͣ"; +CM_ SG_ 2551447460 ChgStopErr6 "BMSֹܴ"; +CM_ SG_ 2551447460 ChgStopErr5 "ڲֹ"; +CM_ SG_ 2551447460 ChgStopErr4 "ֹ"; +CM_ SG_ 2551447460 ChgStopErr3 "ֹ"; +CM_ SG_ 2551447460 ChgStopErr2 "˹ֹͣ"; +CM_ SG_ 2551447460 ChgStopErr1 "عֹͣ"; +CM_ SG_ 2551381924 BMS_ErrTempSens "¶ȴ "; +CM_ SG_ 2551381924 BMS_ErrInitFail "BMSʼɹ"; +CM_ SG_ 2551381924 BMS_ErrFCSocketT "¶ȹ"; +CM_ SG_ 2551381924 BMS_ErrPoleTmpHi "¶ȹ"; +CM_ SG_ 2551381924 BMS_ErrLowVolLo "ѹ"; +CM_ SG_ 2551381924 BMS_ErrLowVolHi "ѹ"; +CM_ SG_ 2551381924 BMS_ErrSOCDevLrg "SOCƫ"; +CM_ SG_ 2551381924 BMS_ErrBMS "BMS"; +CM_ SG_ 2551316388 BMS_ErrHVLk "ѹ"; +CM_ SG_ 2551316388 BMS_ErrInnerRly "̵"; +CM_ SG_ 2551316388 BMS_ErrIR "Ե "; +CM_ SG_ 2551316388 BMS_ErrPreChg "Ԥ"; +CM_ SG_ 2551316388 BMS_ErrSOCLo "SOC"; +CM_ SG_ 2551316388 BMS_ErrFeedBack ""; +CM_ SG_ 2551316388 BMS_ErrDischgCurHi "ŵ"; +CM_ SG_ 2551316388 BMS_ErrChgCurHi " "; +CM_ SG_ 2551316388 BMS_ErrCelTmpDiffHi "²"; +CM_ SG_ 2551316388 BMS_ErrCelVolDiffHi "ѹ"; +CM_ SG_ 2551316388 BMS_ErrSysVolLo "ܵѹ"; +CM_ SG_ 2551316388 BMS_ErrSysVolHi "ܵѹ"; +CM_ SG_ 2551316388 BMS_ErrCelTmpLo "¶ȹ "; +CM_ SG_ 2551316388 BMS_ErrCelTmpHi "¶ȹ"; +CM_ SG_ 
2551316388 BMS_ErrCelVolLo "ѹ "; +CM_ SG_ 2551316388 BMS_ErrCelVolHi "ѹ"; +CM_ SG_ 2550923172 BMS_EmgcyChgFlg "Ӧ־"; +CM_ SG_ 2550923172 BMS_CurMaxChgSOC "ǰijSOC"; +CM_ SG_ 2550857636 BMS_RemainChgTime "Ԥʣʱ "; +CM_ SG_ 2550857636 BMS_ElapsedChgTime "Ѿʱ"; +CM_ SG_ 2550857636 BMS_SupplyVolt "пصصѹ"; +CM_ SG_ 2550857636 BMS_BatSOE "ϵͳSOE"; +CM_ SG_ 2550792100 BMS_FstChgVolRqst "BMSѹ"; +CM_ SG_ 2550792100 BMS_FstChgCurRqst "BMS"; +CM_ SG_ 2550792100 BMS_CC2 "BMSCC2״̬䣩"; +CM_ SG_ 2550792100 BMS_CC "BMSCC״̬䣩"; +CM_ SG_ 2550726564 BMS_FstChgCapA "BMSװó䣩"; +CM_ SG_ 2550726564 BMS_CurChgPow "ǰܻչ"; +CM_ SG_ 2550726564 BMS_CurChgCur "ǰܻյ"; +CM_ SG_ 2550726564 BMS_ChgPlugSts "BMSӲ״̬䣩"; +CM_ SG_ 2550726564 BMS_ChgPlugS3Sts "BMSCC_S3״̬䣩 "; +CM_ SG_ 2550726564 BMS_OBCChgCapA "BMSװó䣩"; +CM_ SG_ 2550661028 BMS_OBCChgVolRqst "BMSOBCѹ "; +CM_ SG_ 2550661028 BMS_OBCChgCurRqst "BMSOBC"; +CM_ SG_ 2550661028 BMS_CPSts "BMSCP״̬䣩"; +CM_ SG_ 2550661028 BMS_CCSts "BMSCC״̬䣩"; +CM_ SG_ 2550661028 BMS_CC2Sts "BMSCC2״̬䣩 "; +CM_ SG_ 2550661028 BMS_BatChgVolCpl "ϵͳɵѹ"; +CM_ SG_ 2550595492 BMS_FCSocketMaxT "¶ȵ¶"; +CM_ SG_ 2550595492 BMS_BatMinCelTNum "¶ȵغ"; +CM_ SG_ 2550595492 BMS_BatMinCelT "¶ȵ¶ "; +CM_ SG_ 2550595492 BMS_BatMaxCelTNum "¶ȵغ"; +CM_ SG_ 2550595492 BMS_BatMaxCelT "¶ȵ¶"; +CM_ SG_ 2550595492 BMS_BatAvgCelT "ƽ¶"; +CM_ SG_ 2550529956 BMS_BatAvgCelVol "ƽѹ"; +CM_ SG_ 2550529956 BMS_BatMinCelVNum "͵غ"; +CM_ SG_ 2550529956 BMS_BatMinCelVol "͵ѹ"; +CM_ SG_ 2550529956 BMS_BatMaxCelVNum "ߵغ"; +CM_ SG_ 2550529956 BMS_BatMaxCelVol "ߵѹ"; +CM_ SG_ 2550464420 BMS_BatChgMaxPow "ϵͳ繦"; +CM_ SG_ 2550464420 BMS_BatChgMaxCur "ϵͳ"; +CM_ SG_ 2550464420 BMS_Bat10sChgMaxPow "ϵͳ10s繦"; +CM_ SG_ 2550464420 BMS_Bat10sChgMaxCur "ϵͳ10s"; +CM_ SG_ 2550398884 BMS_BatDisChgMaxPow "ϵͳŵ繦"; +CM_ SG_ 2550398884 BMS_BatDisChgMaxCur "ϵͳŵ"; +CM_ SG_ 2550398884 BMS_Bat10sDisChgMaxPo "ϵͳ10sŵ繦 "; +CM_ SG_ 2550398884 BMS_Bat10sDisChgMaxCur "ϵͳ10sŵ"; +CM_ SG_ 2550333348 BMS_BatOuterVol "ϵͳܵѹ"; +CM_ SG_ 2550333348 BMS_BatInnerVol "ϵͳڲܵѹ"; +CM_ SG_ 2550333348 BMS_BatTalCur "ϵͳܵ"; +CM_ SG_ 2550333348 BMS_BatIslatRes "ϵͳԵֵ"; +CM_ SG_ 2550267812 BMS_WakeUpSrc "BMSԴ"; +CM_ SG_ 2550267812 BMS_BatSOH "ϵͳSOH"; +CM_ SG_ 2550267812 BMS_BatSOC "ϵͳSOC"; +CM_ SG_ 2550267812 BMS_BatSlwChgSts "ϵͳ״̬"; +CM_ SG_ 2550267812 BMS_BatRunSts "ϵͳ״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsSlwChg "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsPre "ϵͳԤ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsPos "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsNeg "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsFstChgPos "ϵͳ̵״̬"; +CM_ SG_ 2550267812 BMS_BatRlyStsFstChgNeg "ϵͳ为̵״̬"; +CM_ SG_ 2550267812 BMS_BatLockSts "ؽ״̬"; +CM_ SG_ 2550267812 BMS_BatHVLkStsMain "ѹ״̬"; +CM_ SG_ 2550267812 BMS_BatHVLkStsChrg "ѹ״̬"; +CM_ SG_ 2550267812 BMS_BatFstChgSts "ϵͳ״̬"; +CM_ SG_ 2550267812 BMS_BatErrLvl "ϵͳϵȼ"; +BA_DEF_ "BusType" STRING ; +BA_DEF_DEF_ "BusType" "CAN"; +VAL_ 3221225472 BMS_Cmd_Return 255 "Error Code" 0 "Error" ; +VAL_ 3221225472 BMS_Cmd_Func_Param 1 "" 0 "ر" ; +VAL_ 2550202276 BMS_PostRes 2 "Update Failed since last reboot" 1 "Update Success since last reboot" 0 "No Error" ; +VAL_ 2559115172 BMS_Cmd_Func 35 "" 34 "" 5 "Ԥ" 4 "ѹϵź" 2 "SOC" 0 "" ; +VAL_ 2559091967 BMS_Cmd_Func 35 "" 34 "" 5 "Ԥ" 4 "ѹϵź" 2 "SOC" 0 "" ; +VAL_ 2559091967 BMS_Cmd_ECU 164 "BMS" ; +VAL_ 2551381924 BMS_ErrTempSens 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrInitFail 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrFCSocketT 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrPoleTmpHi 12 "ϵȼ4" 9 
"ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrLowVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrLowVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrSOCDevLrg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551381924 BMS_ErrBMS 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrHVLk 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrInnerRly 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrIR 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrPreChg 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrSOCLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrFeedBack 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrDischgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrChgCurHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelTmpDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelVolDiffHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrSysVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrSysVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelTmpLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelTmpHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelVolLo 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2551316388 BMS_ErrCelVolHi 12 "ϵȼ4" 9 "ϵȼ3" 6 "ϵȼ2" 3 "ϵȼ1" 0 "ϵȼ0" ; +VAL_ 2550267812 BMS_WakeUpSrc 1 "CAN" 0 "Կ׻" ; +VAL_ 2550267812 BMS_BatSlwChgSts 2 "" 1 "ɳ" 0 "ɳ" ; +VAL_ 2550267812 BMS_BatRunSts 2 "ѹ" 1 "ŵѹ" 0 "δ" ; +VAL_ 2550267812 BMS_BatRlyStsSlwChg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsPre 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsPos 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsNeg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsFstChgPos 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatRlyStsFstChgNeg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatLockSts 1 "" 0 "" ; +VAL_ 2550267812 BMS_BatHVLkStsMain 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatHVLkStsChrg 1 "պ" 0 "Ͽ" ; +VAL_ 2550267812 BMS_BatFstChgSts 3 "" 2 "(ǹ)" 1 "ɳ(Ѳǹδ)" 0 "ɳ(δǹ)" ; +VAL_ 2550267812 BMS_BatErrLvl 4 "4(ж)" 3 "3()" 2 "2()" 1 "1()" 0 "" ; + diff --git a/files/ci_bot.jpg b/files/ci_bot.jpg new file mode 100644 index 0000000..86b26d7 Binary files /dev/null and b/files/ci_bot.jpg differ diff --git a/files/ci_bot.png b/files/ci_bot.png new file mode 100644 index 0000000..a7d24fd Binary files /dev/null and b/files/ci_bot.png differ diff --git a/files/dm.xlsx b/files/dm.xlsx new file mode 100644 index 0000000..c90aa39 Binary files /dev/null and b/files/dm.xlsx differ diff --git a/files/动态测试.xlsx b/files/动态测试.xlsx new file mode 100644 index 0000000..9f2e810 Binary files /dev/null and b/files/动态测试.xlsx differ diff --git a/lib/dm/fota/api_create_task.py b/lib/dm/fota/api_create_task.py new file mode 100644 index 0000000..762ccf2 --- /dev/null +++ b/lib/dm/fota/api_create_task.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class ApiCreateTask(object): + @staticmethod + def api_create_task(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + 
TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dm/fota/api_create_task.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = ApiCreateTask().api_create_task(re_data) + print(data) + \ No newline at end of file diff --git a/lib/dm/fota/api_swich.py b/lib/dm/fota/api_swich.py new file mode 100644 index 0000000..624a078 --- /dev/null +++ b/lib/dm/fota/api_swich.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class ApiSwich(object): + @staticmethod + def api_swich(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dm/fota/api_swich.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = ApiSwich().api_swich(re_data) + print(data) + \ No newline at end of file diff --git a/lib/dm/fota/aut_wakeup.py b/lib/dm/fota/aut_wakeup.py new file mode 100644 index 0000000..192086c --- /dev/null +++ b/lib/dm/fota/aut_wakeup.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class AutWakeup(object): + @staticmethod + def aut_wakeup(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dm/fota/aut_wakeup.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = AutWakeup().aut_wakeup(re_data) + print(data) + \ No newline at end of file diff --git a/lib/dmglobal/dmglobalfota/global_api_create_task.py b/lib/dmglobal/dmglobalfota/global_api_create_task.py new file mode 100644 index 0000000..b5d63df --- /dev/null +++ b/lib/dmglobal/dmglobalfota/global_api_create_task.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class GlobalApiCreateTask(object): + @staticmethod + def global_api_create_task(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_api_create_task.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = GlobalApiCreateTask().global_api_create_task(re_data) + print(data) + \ No newline at end of file diff --git a/lib/dmglobal/dmglobalfota/global_api_swich.py b/lib/dmglobal/dmglobalfota/global_api_swich.py new file mode 100644 index 0000000..8cb8662 --- /dev/null +++ b/lib/dmglobal/dmglobalfota/global_api_swich.py @@ -0,0 +1,29 @@ +#!/usr/bin/env 
python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class GlobalApiSwich(object): + @staticmethod + def global_api_swich(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_api_swich.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = GlobalApiSwich().global_api_swich(re_data) + print(data) + \ No newline at end of file diff --git a/lib/dmglobal/dmglobalfota/global_aut_wakeup.py b/lib/dmglobal/dmglobalfota/global_aut_wakeup.py new file mode 100644 index 0000000..7fc457b --- /dev/null +++ b/lib/dmglobal/dmglobalfota/global_aut_wakeup.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class GlobalAutWakeup(object): + @staticmethod + def global_aut_wakeup(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_aut_wakeup.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = GlobalAutWakeup().global_aut_wakeup(re_data) + print(data) + \ No newline at end of file diff --git a/lib/dmglobal/dmglobalfota/global_completed_list.py b/lib/dmglobal/dmglobalfota/global_completed_list.py new file mode 100644 index 0000000..cbac092 --- /dev/null +++ b/lib/dmglobal/dmglobalfota/global_completed_list.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class GlobalCompletedList(object): + @staticmethod + def global_completed_list(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_completed_list.yaml')).case_process()[0] + re_data = regular(str(TestData)) + data = GlobalCompletedList().global_completed_list(re_data) + print(data) + \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..fb22b79 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,16 @@ +[pytest] +markers = + prod: prod_case + testing: testing_case + staging: staging_case + testing_global: testing_global_case + staging_global: staging_global_case + +addopts = +;Generate HTML report + --html=./report/pytest_html/result.html --self-contained-html + ;--junit-xml=./report/pytest_html/result.xml + +; +;testpaths = ./test_case/dmglobal/dmglobalsystem +;testpaths = ./test_case/dm/versionsmanager diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 
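For reference, a minimal local equivalent of what run.py does with the markers declared in pytest.ini (a reviewer sketch, assuming the allure CLI is on PATH): select the cases for one environment, then build the HTML report.

import os
import pytest

# run only the cases tagged for the testing environment and collect allure results
pytest.main(["-m", "testing", "--alluredir", "./report/tmp"])
os.system("allure generate ./report/tmp -o ./report/html --clean")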
0000000..a20792d --- /dev/null +++ b/requirements.txt @@ -0,0 +1,33 @@ +allure-pytest==2.9.45 +allure-python-commons==2.9.45 +atomicwrites==1.4.1 +attrs==21.4.0 +certifi==2022.6.15 +charset-normalizer==2.1.0 +colorama==0.4.5 +colorlog==6.6.0 +Faker==13.15.1 +idna==3.3 +importlib-metadata==4.12.0 +iniconfig==1.1.1 +jsonpath==0.82 +packaging==21.3 +pluggy==1.0.0 +py==1.11.0 +PyMySQL==1.0.2 +pytest==7.1.2 +pytest-html==3.1.1 +pytest-metadata==2.0.2 +python-dateutil==2.8.2 +pytz==2022.1 +PyYAML==6.0 +requests==2.28.1 +requests-toolbelt==0.9.1 +six==1.16.0 +tomli==2.0.1 +typing_extensions==4.3.0 +urllib3==1.26.11 +wincertstore==0.2 +zipp==3.8.1 +pyzbar==0.1.9 +pillow==9.2.0 \ No newline at end of file diff --git a/run.py b/run.py new file mode 100644 index 0000000..e97165a --- /dev/null +++ b/run.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import traceback +import pytest +import os +from tools.log_control import INFO +from tools.yaml_control import GetYamlData +from config.settings import ConfigHandler +from tools.feishu_control import FeiShuTalkChatBot +from config.configs import Config + +def run(): + # 从配置文件中获取项目名称 + project_name = GetYamlData(ConfigHandler.config_path).get_yaml_data()['ProjectName'] + testing_evn = Config().__getattr__("CI_ENVIRONMENT_SLUG") + try: + try: + DAV_ENVIRONMENT_SLUG = Config().__getattr__('DAV_ENVIRONMENT_SLUG') + print("DAV_ENVIRONMENT_SLUG",DAV_ENVIRONMENT_SLUG) + INFO.logger.info("""开始执行{0}项目,环境{1}...""".format(project_name, DAV_ENVIRONMENT_SLUG)) + if '-' in DAV_ENVIRONMENT_SLUG: + DAV_ENVIRONMENT_SLUG=DAV_ENVIRONMENT_SLUG.replace("-","_") + pytest.main(['-m %s'%DAV_ENVIRONMENT_SLUG, '-W', 'ignore:Module already imported:pytest.PytestWarning','--alluredir', './report/tmp']) + os.system(r"allure generate ./report/tmp -o ./report/html --clean") + except: + INFO.logger.info("""开始执行{0}项目,默认运行{1}分支环境..""".format(project_name,testing_evn)) + pytest.main(['-m %s'%testing_evn, '-W', 'ignore:Module already imported:pytest.PytestWarning', '--alluredir', './report/tmp']) + os.system(r"allure generate ./report/tmp -o ./report/html --clean") + + except Exception: + # 如有异常,相关异常发送邮件 + e = traceback.format_exc() + # FeiShuTalkChatBot().error_feishu(e) + raise + + +if __name__ == '__main__': + run() + # pytest.main(['-m staging']) + + + + diff --git a/test_case/conftest.py b/test_case/conftest.py new file mode 100644 index 0000000..aae3932 --- /dev/null +++ b/test_case/conftest.py @@ -0,0 +1,214 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import pytest +import os +from config.settings import ConfigHandler +from tools.yaml_control import GetYamlData +from tools.allure_control import allure_step,allure_step_no +import time +from tools.log_control import WARNING, INFO, ERROR +from py._xmlgen import html +from time import strftime +import requests +from tools.cache_control import Cache + +_PROJECT_NAME = GetYamlData(ConfigHandler.config_path).get_yaml_data()['ProjectName'] +_TEST_NAME = GetYamlData(ConfigHandler.config_path).get_yaml_data()['TestName'] + + + + +@pytest.fixture(scope="session", autouse=True) +def clear_report(): + try: + for one in os.listdir(ConfigHandler.report_path + f'/tmp'): + if 'json' in one: + os.remove(ConfigHandler.report_path + f'/tmp/{one}') + if 'txt' in one: + os.remove(ConfigHandler.report_path + f'/tmp/{one}') + except Exception as e: + print("allure数据清除失败", e) + + yield + print("ddd") + + +@pytest.fixture(scope="function", autouse=True) +def case_skip(in_data): + """处理跳过用例""" + if in_data['is_run'] is False: + 
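The skip flow here is driven purely by the case data: each parametrized case dict carries an is_run flag from its yaml file, and because this fixture is autouse with function scope, pytest.skip() fires before the test body ever runs. A minimal illustration of the data shape this assumes (field names taken from the yaml writer in this change; the values are placeholders):

example_case = {"detail": "demo case", "url": "/v1/demo", "method": "GET", "is_run": False}
if example_case["is_run"] is False:
    pass  # the real fixture records placeholder allure steps and then calls pytest.skip()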
# allure.dynamic.title(in_data[YAMLDate.DETAIL.value]) + allure_step_no(f"请求URL: 请求URL") + allure_step_no(f"请求方式: 请求URL") + allure_step("请求头: ", "请求URL") + allure_step("请求头: ", "请求URL") + allure_step("请求头: ", "请求URL") + allure_step("请求头: ", "请求URL") + pytest.skip() + + + +def pytest_terminal_summary(terminalreporter): + """ + 收集测试结果 + """ + + _PASSED = len([i for i in terminalreporter.stats.get('passed', []) if i.when != 'teardown']) + _ERROR = len([i for i in terminalreporter.stats.get('error', []) if i.when != 'teardown']) + _FAILED = len([i for i in terminalreporter.stats.get('failed', []) if i.when != 'teardown']) + _SKIPPED = len([i for i in terminalreporter.stats.get('skipped', []) if i.when != 'teardown']) + _TOTAL = terminalreporter._numcollected + _TIMES = time.time() - terminalreporter._sessionstarttime + + INFO.logger.info(f"成功用例数: {_PASSED}") + ERROR.logger.error(f"异常用例数: {_ERROR}") + ERROR.logger.error(f"失败用例数: {_FAILED}") + WARNING.logger.warning(f"跳过用例数: {_SKIPPED}") + INFO.logger.info("用例执行时长: %.2f" % _TIMES + " s") + + try: + _RATE = round((_PASSED + _SKIPPED) / _TOTAL * 100, 2) + INFO.logger.info("用例成功率: %.2f" % _RATE + " %") + except ZeroDivisionError: + INFO.logger.info("用例成功率: 0.00 %") + + + +# @pytest.fixture(scope="session", autouse=True) +# def write_case_process(): +# """ +# 获取所有用例,写入用例池中 +# :return: +# """ +# +# case_data = {} +# # 循环拿到所有存放用例的文件路径 +# for i in get_all_files(file_path=ConfigHandler.data_path, yaml_data_switch=True): +# # 循环读取文件中的数据 +# case_process = CaseData(i).case_process(case_id_switch=True) +# # 转换数据类型 +# for case in case_process: +# for k, v in case.items(): +# # 判断 case_id 是否已存在 +# case_id_exit = k in case_data.keys() +# # 如果case_id 不存在,则将用例写入缓存池中 +# if case_id_exit is False: +# case_data[k] = v +# # 当 case_id 为 True 存在时,则跑出异常 +# elif case_id_exit is True: +# raise ValueError(f"case_id: {k} 存在重复项, 请修改case_id\n" +# f"文件路径: {i}") +# +# Cache('case_process').set_caches(case_data) + + + + + + + +@pytest.mark.optionalhook +def pytest_html_results_table_header(cells): + # cells.insert(1, html.th('用例描述', class_="sortable", col="name")) # 表头添加Description + cells.insert(3, html.th('执行时间', class_='sortable time', col='time')) + # cells.pop(-1) # 删除link + + +@pytest.mark.optionalhook +def pytest_html_results_table_row(report, cells): + # cells.insert(1, html.td(report.description)) # 表头对应的内容 + cells.insert(3, html.td(strftime('%Y-%m-%d %H:%M:%S'), class_='col-time')) + # cells.pop(-1) # 删除link列 + + +@pytest.mark.optionalhook +def pytest_html_results_table_html(report, data): # 清除执行成功的用例logs + if report.passed: + del data[:] + data.append(html.div('正常通过用例不抓取日志', class_='empty log')) + + +# 修改Environment部分信息,配置测试报告环境信息 +@pytest.mark.optionalhook +def pytest_html_report_title(report): + report.title = "自动化测试报告" + + +def pytest_configure(config): + from config.configs import Config + import datetime + host = str(Config().get_host11()) + # 运行环境 + try: + env = Config().__getattr__("DAV_ENVIRONMENT_SLUG") + except: + env = Config().__getattr__("CI_ENVIRONMENT_SLUG") + current_time = (datetime.datetime.now() + datetime.timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S') + # 添加接口地址与项目名称 + config._metadata["项目名称"] = _PROJECT_NAME + config._metadata['域名'] = host + config._metadata['测试环境'] = env + config._metadata['开始时间'] = current_time + try: + config._metadata['部署项目commit ID'] = Config.DAV_COMMIT_ID + config._metadata['分支'] = Config.DAV_COMMIT_BRANCH + config._metadata['部署的镜像tag'] = Config.DAV_IMAGE_TAG + config._metadata['环境名字'] = Config.DAV_ENVIRONMENT_SLUG + 
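One behavioural note on these Config.DAV_* assignments (reviewer observation): Config.DAV_COMMIT_ID and the other class-level lookups never reach the instance-level __getattr__ hook, so they raise AttributeError and the bare except silently drops all of these metadata fields. Reading through an instance behaves as intended; a minimal sketch, assuming the variable is exported by the CI job:

import os

class Config:
    def __getattr__(self, attr):
        return os.environ[attr]

if "DAV_COMMIT_ID" in os.environ:
    commit_id = Config().DAV_COMMIT_ID   # same as os.environ["DAV_COMMIT_ID"]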
config._metadata['项目路径'] = Config.DAV_PROJECT_PATH + except: + pass + + # 删除Java_Home + if config._metadata.get("CI"): + config._metadata.pop("CI") + config._metadata.pop("CI_COMMIT_REF_NAME") + config._metadata.pop("CI_COMMIT_REF_SLUG") + config._metadata.pop("CI_COMMIT_SHA") + config._metadata.pop("CI_ENVIRONMENT_NAME") + config._metadata.pop("CI_ENVIRONMENT_SLUG") + config._metadata.pop("CI_JOB_ID") + config._metadata.pop("CI_JOB_NAME") + config._metadata.pop("CI_JOB_STAGE") + config._metadata.pop("CI_PIPELINE_ID") + config._metadata.pop("CI_PROJECT_DIR") + config._metadata.pop("CI_PROJECT_ID") + config._metadata.pop("CI_PROJECT_NAME") + config._metadata.pop("CI_PROJECT_NAMESPACE") + config._metadata.pop("CI_PROJECT_PATH") + config._metadata.pop("CI_PROJECT_URL") + config._metadata.pop("CI_REGISTRY") + config._metadata.pop("CI_REGISTRY_IMAGE") + config._metadata.pop("CI_REGISTRY_USER") + config._metadata.pop("CI_RUNNER_DESCRIPTION") + config._metadata.pop("CI_RUNNER_ID") + config._metadata.pop("CI_SERVER") + config._metadata.pop("CI_SERVER_NAME") + config._metadata.pop("CI_SERVER_REVISION") + config._metadata.pop("CI_SERVER_VERSION") + config._metadata.pop("GITLAB_CI") + config._metadata.pop("GITLAB_USER_EMAIL") + config._metadata.pop("GITLAB_USER_ID") + config._metadata.pop("JAVA_HOME") + config._metadata.pop("Packages") + config._metadata.pop("Platform") + config._metadata.pop("Plugins") + + +# 修改Summary部分的信息 +def pytest_html_results_summary(prefix, summary, postfix): + from config.configs import Config + test_user = Config().__getattr__("GITLAB_USER_NAME") + prefix.extend([html.p("所属部门: 达芬骑测试部")]) + prefix.extend([html.p("测试人员: {}".format(test_user))]) + + +@pytest.mark.hookwrapper +def pytest_runtest_makereport(item, call): + outcome = yield + report = outcome.get_result() + if item.function.__doc__ is None: + report.description = str(item.function.__name__) + else: + report.description = str(item.function.__doc__) + report.nodeid = report.nodeid.encode("utf-8").decode("unicode_escape") \ No newline at end of file diff --git a/test_case/dm/conftest.py b/test_case/dm/conftest.py new file mode 100644 index 0000000..ed1fa42 --- /dev/null +++ b/test_case/dm/conftest.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import pytest +import requests +from tools.cache_control import Cache +# +@pytest.fixture(scope="session", autouse=True) +def change_env(): + """ + 获取登录的cookie + :return: + """ + from config.configs import Config + try: + testing_evn = Config().__getattr__("DAV_ENVIRONMENT_SLUG") + if "-" in testing_evn: + list_evn = testing_evn.split("-") + cur_evn = list_evn[0] + else: + cur_evn = testing_evn + except: + cur_evn = Config().__getattr__("CI_ENVIRONMENT_SLUG") + host = str(Config().__getattr__("hostbus")) + login_url = host + '/v1/mapping/update' + vin = Cache('vin').get_cache() + params = {'vin':vin,'environment':cur_evn} + headers = {"Content-Type": "application/json"} + try: + res = requests.post(url=login_url, json=params, headers=headers).json() + print("切换环境为%s"%cur_evn) + except Exception as e: + raise e + +# work_login_init() + + +@pytest.fixture(scope="session", autouse=True) +def work_login_init(): + """ + 获取登录的cookie + :return: + """ + from config.configs import Config + host = str(Config().get_host11()) + user = str(Config().__getattr__("dm_phone")) + pwd = str(Config().__getattr__("dm_password")) + login_url = host + '/auth/oauth/token' + params = {'username': user, + 'password': pwd, + 'grant_type': 'password', + 'client_id': 'client', + 
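The tokens this fixture obtains are persisted through tools.cache_control.Cache, which simply writes the value as text to a file under cache/; later steps can read it back by name. A minimal usage sketch with the same key used here (the token value is a placeholder):

from tools.cache_control import Cache

Cache("login_token").set_caches("eyJhbGciOi...")   # writes str(value) to cache/login_token
token = Cache("login_token").get_cache()           # reads the raw string back later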
'client_secret': 'AlplxNdEhtLVaVenq4A'} + headers = {"Content-Type": "application/json"} + try: + res = requests.post(url=login_url, params=params, headers=headers).json() + token = res['data']['accessToken'] + ref = res['data']['refreshToken'] + print(token) + Cache('login_token').set_caches(token) + Cache('refreshToken').set_caches(ref) + except Exception as e: + raise e +# +# work_login_init() +# change_env() \ No newline at end of file diff --git a/test_case/dm/fota/test_api_create_task.py b/test_case/dm/fota/test_api_create_task.py new file mode 100644 index 0000000..595f789 --- /dev/null +++ b/test_case/dm/fota/test_api_create_task.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dm/fota/api_create_task.yaml')).case_process() +re_data = regular(str(TestData)) + +@pytest.mark.testing +@pytest.mark.prod +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestApiCreateTask: + + @allure.story("新建任务") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_api_create_task(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_api_create_task.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/test_case/dm/fota/test_api_swich.py b/test_case/dm/fota/test_api_swich.py new file mode 100644 index 0000000..70f2344 --- /dev/null +++ b/test_case/dm/fota/test_api_swich.py @@ -0,0 +1,38 @@ + #!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dm/fota/api_swich.yaml')).case_process() +re_data = regular(str(TestData)) + +@pytest.mark.testing +@pytest.mark.prod +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestApiSwich: + + @allure.story("切换任务队列执行状态") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_api_swich(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_api_swich.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/test_case/dm/fota/test_aut_wakeup.py b/test_case/dm/fota/test_aut_wakeup.py new file mode 100644 index 0000000..42ce927 --- /dev/null +++ b/test_case/dm/fota/test_aut_wakeup.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis 
import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dm/fota/aut_wakeup.yaml')).case_process() +re_data = regular(str(TestData)) + +@pytest.mark.testing +@pytest.mark.prod +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestAutWakeup: + + @allure.story("一键唤醒") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_aut_wakeup(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_aut_wakeup.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/test_case/dmglobal/conftest.py b/test_case/dmglobal/conftest.py new file mode 100644 index 0000000..9e58c2f --- /dev/null +++ b/test_case/dmglobal/conftest.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import pytest +import requests +from tools.cache_control import Cache + +@pytest.fixture(scope="session", autouse=True) +def change_env(): + """ + 获取登录的cookie + :return: + """ + from config.configs import Config + try: + testing_evn = Config().__getattr__("DAV_ENVIRONMENT_SLUG") + if "-" in testing_evn: + list_evn = testing_evn.split("-") + cur_evn = list_evn[0] + else: + cur_evn = testing_evn + except: + cur_evn = Config().__getattr__("CI_ENVIRONMENT_SLUG") + host = str(Config().__getattr__("hostbus_global")) + login_url = host + '/v1/mapping/update' + vin = Cache('vin_global').get_cache() + params = {'vin':vin,'environment':cur_evn} + headers = {"Content-Type": "application/json"} + try: + res = requests.post(url=login_url, json=params, headers=headers).json() + print("切换环境成功,当前环境为%s"%cur_evn) + except Exception as e: + raise e + +# +@pytest.fixture(scope="session", autouse=True) +def work_login_init(): + """ + 获取登录的cookie + :return: + """ + from config.configs import Config + host = str(Config().get_host11()) + user = str(Config().__getattr__("dm_global_phone")) + pwd = str(Config().__getattr__("dm_global_password")) + login_url = host + '/auth/oauth/token' + params = {'username': user, + 'password': pwd, + 'grant_type': 'password', + 'client_id': 'client', + 'client_secret': 'AlplxNdEhtLVaVenq4A'} + headers = {"Content-Type": "application/json"} + try: + res = requests.post(url=login_url, params=params, headers=headers).json() + print(res) + token = res['data']['accessToken'] + ref = res['data']['refreshToken'] + print(token) + Cache('login_token').set_caches(token) + Cache('refreshToken').set_caches(ref) + except Exception as e: + raise e + +# # work_login_init() +# change_env() \ No newline at end of file diff --git a/test_case/dmglobal/dmglobalfota/test_global_api_create_task.py b/test_case/dmglobal/dmglobalfota/test_global_api_create_task.py new file mode 100644 index 0000000..3436d20 --- /dev/null +++ b/test_case/dmglobal/dmglobalfota/test_global_api_create_task.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control 
import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_api_create_task.yaml')).case_process() +re_data = regular(str(TestData)) + + +@pytest.mark.testing_global +@pytest.mark.staging_global +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestGlobalApiCreateTask: + + @allure.story("新建任务") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_global_api_create_task(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_global_api_create_task.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/test_case/dmglobal/dmglobalfota/test_global_api_swich.py b/test_case/dmglobal/dmglobalfota/test_global_api_swich.py new file mode 100644 index 0000000..59c022f --- /dev/null +++ b/test_case/dmglobal/dmglobalfota/test_global_api_swich.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_api_swich.yaml')).case_process() +re_data = regular(str(TestData)) + + +@pytest.mark.testing_global +@pytest.mark.staging_global +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestGlobalApiSwich: + + @allure.story("切换任务队列执行状态") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_global_api_swich(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_global_api_swich.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/test_case/dmglobal/dmglobalfota/test_global_aut_wakeup.py b/test_case/dmglobal/dmglobalfota/test_global_aut_wakeup.py new file mode 100644 index 0000000..fc9b1f4 --- /dev/null +++ b/test_case/dmglobal/dmglobalfota/test_global_aut_wakeup.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_aut_wakeup.yaml')).case_process() +re_data = regular(str(TestData)) + + +@pytest.mark.testing_global +@pytest.mark.staging_global +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestGlobalAutWakeup: + + @allure.story("一键唤醒") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_global_aut_wakeup(self, in_data, case_skip): + """ + :param : + :return: + """ 
+ + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_global_aut_wakeup.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/test_case/dmglobal/dmglobalfota/test_global_completed_list.py b/test_case/dmglobal/dmglobalfota/test_global_completed_list.py new file mode 100644 index 0000000..ffe69f5 --- /dev/null +++ b/test_case/dmglobal/dmglobalfota/test_global_completed_list.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl +from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'dmglobal/dmglobalfota/global_completed_list.yaml')).case_process() +re_data = regular(str(TestData)) + + +@pytest.mark.testing_global +@pytest.mark.staging_global +@allure.epic("DM系统") +@allure.feature("FOTA系统") +class TestGlobalCompletedList: + + @allure.story("已完成任务列表") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_global_completed_list(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['test_global_completed_list.py', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) diff --git a/tools/__init__.py b/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/allure_attchement_type_enum.py b/tools/allure_attchement_type_enum.py new file mode 100644 index 0000000..4473142 --- /dev/null +++ b/tools/allure_attchement_type_enum.py @@ -0,0 +1,35 @@ +from enum import Enum, unique + + +@unique +class AllureAttachmentType(Enum): + """ + allure 报告的文件类型枚举 + """ + TEXT = "txt" + CSV = "csv" + TSV = "tsv" + URI_LIST = "uri" + + HTML = "html" + XML = "xml" + JSON = "json" + YAML = "yaml" + PCAP = "pcap" + + PNG = "png" + JPG = "jpg" + SVG = "svg" + GIF = "gif" + BMP = "bmp" + TIFF = "tiff" + + MP4 = "mp4" + OGG = "ogg" + WEBM = "webm" + + PDF = "pdf" + + @staticmethod + def attachment_types(): + return list(map(lambda c: c.value, AllureAttachmentType)) diff --git a/tools/allure_control.py b/tools/allure_control.py new file mode 100644 index 0000000..95227cb --- /dev/null +++ b/tools/allure_control.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import allure +import json +from config.settings import ConfigHandler +from tools.yaml_control import GetYamlData +from tools.allure_attchement_type_enum import AllureAttachmentType + + +def allure_step(step: str, var: str) -> None: + """ + :param step: 步骤及附件名称 + :param var: 附件内容 + """ + with allure.step(step): + allure.attach(json.dumps(str(var),ensure_ascii=False,indent=4), step,allure.attachment_type.JSON) + + +def allure_step_no(step: str): + """ + 无附件的操作步骤 + :param step: 步骤名称 + :return: + """ + with allure.step(step): + pass + + + + +def allure_attach(source: str, name: str, extension: str): + """ + allure报告上传附件、图片、excel等 + :param source: 文件路径,相当于传一个文件 + :param 
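For context on how these helpers are used in the suite's fixtures (a reviewer sketch meant to run inside a test; the values are placeholders): allure_step_no records a bare step, while allure_step additionally attaches the variable as a JSON attachment.

from tools.allure_control import allure_step, allure_step_no

allure_step_no("请求URL: /v1/demo")                          # step with no attachment
allure_step("响应数据: ", {"status": 200, "message": "ok"})   # step plus JSON attachment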
name: 附件名称 + :param extension: 附件的拓展名称 + :return: + """ + # 获取上传附件的尾缀,判断对应的 attachment_type 枚举值 + _NAME = name.split('.')[-1] + _attachment_type = getattr(AllureAttachmentType, _NAME, None) + allure.attach.file(source=source, + name=name, + attachment_type=_attachment_type if _attachment_type is None else _attachment_type.value, + extension=extension) + + +def SqlSwitch() -> bool: + """获取数据库开关""" + switch = GetYamlData(ConfigHandler.config_path).get_yaml_data()['MySqlDB']["switch"] + return switch + + +def getNotificationType(): + # 获取报告通知类型,是发送钉钉还是企业邮箱 + Date = GetYamlData(ConfigHandler.config_path).get_yaml_data()['NotificationType'] + return Date + diff --git a/tools/allure_report_control.py b/tools/allure_report_control.py new file mode 100644 index 0000000..fd8c45b --- /dev/null +++ b/tools/allure_report_control.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import json +from config.settings import ConfigHandler +import os + + +class AllureFileClean: + """allure 报告数据清洗,提取业务需要得数据""" + + def __init__(self): + pass + + @classmethod + def _get_al_files(cls) -> list: + """ 获取所有 test-case 中的 json 文件 """ + filename = [] + # 获取所有文件下的子文件名称 + for root, dirs, files in os.walk(ConfigHandler.report_path + '/html/data/test-cases'): + for filePath in files: + path = os.path.join(root, filePath) + filename.append(path) + return filename + + def get_test_cases(self): + """ 获取所有 allure 报告中执行用例的情况""" + # 将所有数据都收集到files中 + files = [] + for i in self._get_al_files(): + with open(i, 'r', encoding='utf-8') as fp: + date = json.load(fp) + files.append(date) + return files + + def get_failed_case(self): + """ 获取到所有失败的用例标题和用例代码路径""" + error_cases = [] + for i in self.get_test_cases(): + if i['status'] == 'failed' or i['status'] == 'broken': + error_cases.append((i['name'], i['fullName'])) + return error_cases + + def get_failed_cases_detail(self): + """ 返回所有失败的测试用例相关内容 """ + date = self.get_failed_case() + # 判断有失败用例,则返回内容 + if len(date) >= 1: + values = "失败用例:\n" + values += " **********************************\n" + for i in date: + values += " " + i[0] + ":" + i[1] + "\n" + return values + else: + # 如果没有失败用例,则返回False + return "" + + # @classmethod + def get_case_count(cls): + """ 统计用例数量 """ + file_name = ConfigHandler.report_path + '/html/history/history-trend.json' + with open(file_name, 'r', encoding='utf-8') as fp: + date = json.load(fp)[0]['data'] + return date + + +class CaseCount: + def __init__(self): + self.AllureData = AllureFileClean() + + + def pass_count(self): + """用例成功数""" + return self.AllureData.get_case_count()['passed'] + + def failed_count(self): + """用例失败数""" + return self.AllureData.get_case_count()['failed'] + + def broken_count(self): + """用例异常数""" + return self.AllureData.get_case_count()['broken'] + + def skipped_count(self): + """用例跳过数""" + return self.AllureData.get_case_count()['skipped'] + + def total_count(self): + """用例总数""" + return self.AllureData.get_case_count()['total'] + + def pass_rate(self): + """用例成功率""" + # 四舍五入,保留2位小数 + try: + pass_rate = round((self.pass_count() + self.skipped_count()) / self.total_count() * 100, 2) + return pass_rate + except ZeroDivisionError: + return 0.00 + + +if __name__ == '__main__': + data = AllureFileClean().get_case_count() + print(data) + print(CaseCount().total_count()) diff --git a/tools/assert_control.py b/tools/assert_control.py new file mode 100644 index 0000000..48d90af --- /dev/null +++ b/tools/assert_control.py @@ -0,0 +1,198 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2022/3/28 14:18 +# 
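To make the contract of the Assert class below concrete, here is a minimal self-contained example modelled on the commented-out demo at the bottom of this module; the field names (jsonpath / type / value / AssertType) are the ones assert_equality() reads:

from tools.assert_control import Assert

assert_data = {
    "check_status": {"jsonpath": "$.status", "type": "==", "value": 200, "AssertType": None},
}
response_data = {"status": 200, "message": "ok", "data": "12312312"}
# AssertType None means "assert against the response"; jsonpath pulls $.status out of it
Assert(assert_data).assert_equality(response_data=response_data, sql_data={"sql": None})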
@Author : 郭林莉 + + +import jsonpath +from tools.allure_control import SqlSwitch +from tools.log_control import ERROR, WARNING + + +class Assert: + + def __init__(self, assert_data): + self.assert_data = assert_data + + @staticmethod + def _check_params(response_data, sql_data): + """ + + :param response_data: 响应数据 + :param sql_data: 数据库数据 + :return: + """ + # 用例如果不执行,接口返回的相应数据和数据库断言都是 False,这里则判断跳过断言判断 + if response_data is False and sql_data is False: + return False + else: + # 判断断言的数据类型 + if isinstance(response_data, dict) and isinstance(sql_data, dict): + pass + elif isinstance(response_data, str): + pass + else: + # pass + raise ValueError("response_data、sql_data、assert_data的数据类型必须要是字典或者str类型") + + @staticmethod + def _assert_type(key: any, types: str, value: any): + # 是否相等 + if str(types) == "==": + assert key == value + # 判断实际结果小于预期结果 + elif str(types) == "lt": + assert key < value + # 判断实际结果小于等于预期结果 + elif str(types) == "le": + assert key < value + # 判断实际结果大于预期结果 + elif str(types) == "gt": + assert key > value + # 判断实际结果大于等于预期结果 + elif str(types) == "ge": + assert key >= value + # 判断实际结果不等于预期结果 + elif str(types) == "not_eq": + assert key != value + # 判断字符串是否相等 + elif str(types) == "str_eq": + assert key == value + # 判断长度是否相等 + elif str(types) == "len_eq": + # assertion isinstance(value, int) + assert len(key) == value + # 判断长度大于 + elif str(types) == "len_gt": + assert isinstance(value, int) + assert len(str(key)) > value + # 判断长度大于等于 + elif str(types) == 'len_ge': + assert isinstance(value, int) + assert len(key) >= value + elif str(types) == "len_lt": + assert isinstance(value, int) + assert len(key) < value + # 判断长度小于等于 + elif str(types) == 'len_le': + assert isinstance(value, int) + assert len(key) <= value + # 判断期望结果内容包含在实际结果中 + elif str(types) == "contains": + assert str(value) in str(key) + # 判断实际结果包含在期望结果中 + elif str(types) == 'contained_by': + assert str(value) in str(key) + # 检查响应内容的开头是否和预期结果内容的开头相等 + elif str(types) == 'startswith': + assert str(value).startswith(str(key)) + # 检查响应内容的结尾是否和预期结果内容相等 + elif str(types) == 'endswith': + assert str(key).endswith(str(value)) + elif str(types) == 'listdata': + assert key + elif str(types) == 'null_list': + assert not key + elif str(types) == 'listdata_len': + # print(len(key)) + assert len(key) >= value + elif str(types) == 'status_code': + print("KEY是", key) + print("value", value) + # assert (key['status_code'], 200) + elif str(types) == 'list_in': + # print(len(key)) + + if key in value: + assert True + else: + assert False + else: + raise ValueError(f"断言失败,目前不支持{types}断言类型,如需新增断言类型,请联系管理员") + + def sql_switch_handle(self, sql_data, assert_value, key, values, resp_data) -> None: + """ + + :param sql_data: 测试用例中的sql + :param assert_value: 断言内容 + :param key: + :param values: + :param resp_data: 预期结果 + :return: + """ + # 判断数据库为开关为关闭状态 + if SqlSwitch() is False: + WARNING.logger.warning(f"检测到数据库状态为关闭状态,程序已为您跳过此断言,断言值:{values}") + # 数据库开关为开启 + if SqlSwitch(): + # 判断当用例走的数据数据库断言,但是用例中未填写SQL + if sql_data == {'sql': None}: + raise ValueError("请在用例中添加您要查询的SQL语句。") + # 走正常SQL断言逻辑 + else: + res_sql_data = jsonpath.jsonpath(sql_data, assert_value)[0] + # 判断mysql查询出来的数据类型如果是bytes类型,转换成str类型 + if isinstance(res_sql_data, bytes): + res_sql_data = res_sql_data.decode('utf=8') + self._assert_type(types=self.assert_data[key]['type'], key=resp_data[0], value=res_sql_data) + + def assert_type_handle(self, assert_type, sql_data, assert_value, key, values, resp_data) -> None: + # 判断断言类型 + if assert_type == 'SQL': + self.sql_switch_handle(sql_data, 
assert_value, key, values, resp_data) + # 判断assertType为空的情况下,则走响应断言 + elif assert_type == 'str': + print('现在是str类型的断言') + print("预期值",assert_value) + print("实际值", resp_data) + self._assert_type(types=self.assert_data[key]['type'], key=resp_data, value=assert_value) + # elif assert_type == 'noResponse': + # print("预期值", assert_value) + # print("实际值", resp_data[0]) + # self._assert_type(types=self.assert_data[key]['type'], key=resp_data, value=assert_value) + elif assert_type is None: + # print("预期值", assert_value) + # print("实际值" ,resp_data[0]) + self._assert_type(types=self.assert_data[key]['type'], key=resp_data[0], value=assert_value) + else: + raise ValueError("断言失败,目前只支持数据库断言和响应断言") + + def assert_equality(self, response_data, sql_data): + # 判断数据类型 + if self._check_params(response_data, sql_data) is not False: + for key, values in self.assert_data.items(): + assert_value = self.assert_data[key]['value'] # 获取 yaml 文件中的期望value值 + assert_jsonpath = self.assert_data[key]['jsonpath'] # 获取到 yaml断言中的jsonpath的数据 + + + assert_type = self.assert_data[key]['AssertType'] + # 从yaml获取jsonpath,拿到对象的接口响应数据 + if assert_type=='str': + resp_data = response_data + print("resp_dataresp_dataresp_data",resp_data) + else: + resp_data = jsonpath.jsonpath(response_data, assert_jsonpath) + + # jsonpath 如果数据获取失败,会返回False,判断获取成功才会执行如下代码 + if resp_data is not False: + # 判断断言类型 + # print('断言',assert_type, sql_data, assert_value, key, values, resp_data) + self.assert_type_handle(assert_type, sql_data, assert_value, key, values, resp_data) + else: + ERROR.logger.error("JsonPath值获取失败{}".format(assert_jsonpath)) + raise ValueError(f"JsonPath值获取失败{assert_jsonpath}") + else: + raise '断言失败' + pass + +# +# if __name__ == '__main__': +# from tools.readfiletools.yaml_data_analysis import CaseData +# from config.setting import ConfigHandler +# #获取用例清洗后的数据 +# GetCaseData = CaseData(ConfigHandler.merchant_data_path + r'test_dm\InFo.yaml').case_process() +# #获取用例列表里面第一个用例的 assert +# cc = GetCaseData[0]['assert'] +# aa = {'status': 200, 'message': 'ok', 'data': '12312312'} +# bb = {'sql': None} +# Assert(cc).assert_equality(response_data=aa,sql_data=bb) diff --git a/tools/cache_control.py b/tools/cache_control.py new file mode 100644 index 0000000..cf2f31e --- /dev/null +++ b/tools/cache_control.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from config.settings import ConfigHandler +import os +from typing import Any + + +class Cache: + """ 设置、读取缓存 """ + + def __init__(self, filename: [str, bool]) -> None: + # 如果filename不为空,则操作指定文件内容 + if filename: + self.path = os.path.join(ConfigHandler().cache_path,filename) + # 如果filename为None,则操作所有文件内容 + else: + self.path = ConfigHandler().cache_path + + def set_cache(self, key: str, value) -> None: + """ + 设置缓存, 只支持设置单字典类型缓存数据, 缓存文件如以存在,则替换之前的缓存内容 + :return: + """ + with open(self.path, 'w') as f: + f.write(str({key: value})) + + + def set_caches(self, value: any) -> None: + """ + 设置多组缓存数据 + :param value: 缓存内容 + :return: + """ + with open(self.path, 'w') as f: + f.write(str(value)) + + def get_cache(self) -> Any: + """ + 获取缓存数据 + :return: + """ + with open(self.path, 'r') as f: + return f.read() + + def clean_cache(self): + if not os.path.exists(self.path): + raise ValueError("您要删除的缓存文件不存在. 
{0}".format(self.path)) + os.remove(self.path) + + @classmethod + def clean_all_cache(cls) -> None: + """ + 清除所有缓存文件 + :return: + """ + cache_path = ConfigHandler().cache_path + # 列出目录下所有文件,生成一个list + list_dir = os.listdir(cache_path) + for i in list_dir: + # 循环删除文件夹下得所有内容 + os.remove(cache_path + "/" + i) + + +if __name__ == '__main__': + a = Cache('ecu_collection_select_id').get_cache() + + print(a) + # Cache('2022-6-2').set_cache("2022/6/2",'guolinli') + Cache('2022-6-3').set_caches( 'guolinli') diff --git a/tools/case_automatic_control.py b/tools/case_automatic_control.py new file mode 100644 index 0000000..433cfe3 --- /dev/null +++ b/tools/case_automatic_control.py @@ -0,0 +1,248 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2022/3/28 13:22 +# @Author : 郭林莉 + +import os +from config.settings import ConfigHandler +from tools.testcase_template import writePageFiles, write_testcase_file +from tools.yaml_control import GetYamlData +from tools.get_all_files_path import get_all_files + +class TestCaseAutomaticGeneration: + """自动生成自动化测试中的test_case代码""" + + # TODO 自动生成测试代码 + def __init__(self): + pass + + @classmethod + def case_date_path(cls) -> str: + """返回 yaml 用例文件路径""" + return ConfigHandler.data_path + + @classmethod + def case_path(cls) -> str: + """ 存放用例代码路径""" + return ConfigHandler.case_path + + def file_name(self, file: str) -> str: + """ + 通过 yaml文件的命名,将名称转换成 py文件的名称 + :param file: yaml 文件路径 + :return: 示例: DateDemo.py + """ + i = len(self.case_date_path()) + yaml_path = file[i:] + file_name = None + # 路径转换 + if '.yaml' in yaml_path: + file_name = yaml_path.replace('.yaml', '.py') + elif '.yml' in yaml_path: + file_name = yaml_path.replace('.yml', '.py') + return file_name + + def lib_page_path(self, file_path): + """ + 根据 yaml中的用例数据,生成对应分成中 lib 层代码路径 + :param file_path: yaml用例路径 + :return: D:\\Project\\lib\\DateDemo.py + """ + return ConfigHandler.lib_path + self.file_name(file_path) + + + def get_package_path(self, file_path: str) -> str: + """ + 根据不同的层级,获取 test_case 中需要依赖的包 + :return: from lib.test_dm import DateDemo + """ + lib_path = self.file_name(file_path) + i = lib_path.split(os.sep) + # 判断多层目录下的导报结构 + if len(i) > 1: + package_path = "from lib" + for files in i: + # 去掉路径中的 .py + if '.py' in files: + files = files[:-3] + package_path += "." 
+ files + # 正常完整版本的多层结构导包路径 + package_path += ' import' + ' ' + i[-1][:-3] + return package_path + # 判断一层目录下的导报结构 + elif len(i) == 1: + return f"from lib.{i[0][:-3]} import {i[0][:-3]}" + + def get_case_path(self, file_path: str) -> tuple: + """ + 根据 yaml 中的用例,生成对应 testCase 层代码的路径 + :param file_path: yaml用例路径 + :return: D:\\Project\\test_case\\test_case_demo.py, test_case_demo.py + """ + + # 这里通过“\\” 符号进行分割,提取出来文件名称 + path = self.file_name(file_path).split(os.sep) + # 判断生成的 testcase 文件名称,需要以test_ 开头 + case_name = path[-1] = path[-1].replace(path[-1], "test_" + path[-1]) + new_name = os.sep.join(path) + return ConfigHandler.case_path + new_name, case_name + + @classmethod + def get_testcase_detail(cls, file_path: str) -> str: + """ + 获取用例描述 + :param file_path: yaml 用例路径 + :return: + """ + return GetYamlData(file_path).get_yaml_data()[0]['detail'] + + def get_test_class_title(self, file_path: str) -> str: + """ + 自动生成类名称 + :param file_path: + :return: sup_apply_list --> SupApplyList + """ + # 提取文件名称 + _FILE_NAME = os.path.split(self.file_name(file_path))[1][:-3] + _NAME = _FILE_NAME.split("_") + # 将文件名称格式,转换成类名称: sup_apply_list --> SupApplyList + for i in range(len(_NAME)): + _NAME[i] = _NAME[i].capitalize() + _CLASS_NAME = "".join(_NAME) + + return _CLASS_NAME + + @classmethod + def error_message(cls, param_name, file_path): + """ + 用例中填写不正确的相关提示 + :return: + """ + msg = f"用例中未找到 {param_name} 参数值,请检查新增的用例中是否填写对应的参数内容" \ + "如已填写,可能是 yaml 参数缩进不正确\n" \ + f"用例路径: {file_path}" + return msg + + def func_title(self, file_path: str) -> str: + """ + 函数名称 + :param file_path: yaml 用例路径 + :return: + """ + + _FILE_NAME = os.path.split(self.file_name(file_path))[1][:-3] + return _FILE_NAME + + @classmethod + def allure_epic(cls, case_data: dict, file_path) -> str: + """ + 用于 allure 报告装饰器中的内容 @allure.epic("项目名称") + :param file_path: 用例路径 + :param case_data: 用例数据 + :return: + """ + try: + return case_data['case_common']['allureEpic'] + except KeyError: + raise KeyError(cls.error_message( + param_name="allureEpic", + file_path=file_path + )) + + @classmethod + def allure_feature(cls, case_data: dict, file_path) -> str: + """ + 用于 allure 报告装饰器中的内容 @allure.feature("模块名称") + :param file_path: + :param case_data: + :return: + """ + try: + return case_data['case_common']['allureFeature'] + except KeyError: + raise KeyError(cls.error_message( + param_name="allureFeature", + file_path=file_path + )) + + @classmethod + def allure_story(cls, case_data: dict, file_path) -> str: + """ + 用于 allure 报告装饰器中的内容 @allure.story("测试功能") + :param file_path: + :param case_data: + :return: + """ + try: + return case_data['case_common']['allureStory'] + except KeyError: + raise KeyError(cls.error_message( + param_name="allureStory", + file_path=file_path + )) + + @classmethod + def markers(cls, case_data: dict, file_path) -> str: + """ + 用于 用例标签 中的内容 @pytest.mark.staging + :param case_data: 用例数据 + :param file_path: 用例路径 + :return: + """ + try: + if case_data['case_common']['markers'] == 'global': + mark = '@pytest.mark.testing_global\n@pytest.mark.staging_global' + else: + mark = '@pytest.mark.testing\n@pytest.mark.staging\n@pytest.mark.prod' + return mark + except KeyError as exc: + raise KeyError(cls.error_message(param_name="markers",)) from exc + + def mk_dir(self, file_path: str) -> None: + """ 判断生成自动化代码的文件夹路径是否存在,如果不存在,则自动创建 """ + _LibDirPath = os.path.split(self.lib_page_path(file_path))[0] + + _CaseDirPath = os.path.split(self.get_case_path(file_path)[0])[0] + _PathList = [_LibDirPath, _CaseDirPath] + for i in _PathList: + 
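As a compact restatement of the naming transform this generator applies (reviewer sketch; the example matches the lib/ and test_case/ files already present in this change):

def class_title(file_name: str) -> str:
    # sup_apply_list -> SupApplyList, mirroring get_test_class_title()
    return "".join(part.capitalize() for part in file_name.split("_"))

assert class_title("api_create_task") == "ApiCreateTask"
# dm/fota/api_create_task.yaml (under ConfigHandler.data_path)
#   -> lib/dm/fota/api_create_task.py and test_case/dm/fota/test_api_create_task.py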
if not os.path.exists(i): + os.makedirs(i) + + def yaml_path(self, file_path: str) -> str: + """ + 生成动态 yaml 路径, 主要处理业务分层场景 + :param file_path: 如业务有多个层级, 则获取到每一层/test_dm/DateDemo.py + :return: Login/common.yaml + """ + i = len(self.case_date_path())+1 + # 兼容 linux 和 window 操作路径 + yaml_path = file_path[i:].replace("\\", "/") + return yaml_path + + def get_case_automatic(self) -> None: + """ 自动生成 测试代码""" + file_path = get_all_files(file_path=ConfigHandler.data_path, yaml_data_switch=True) + print(file_path) + for file in file_path: + print(file) + # 判断代理拦截的yaml文件,不生成test_case代码 + if 'proxy_data.yaml' not in file: + # 判断用例需要用的文件夹路径是否存在,不存在则创建 + self.mk_dir(file) + yaml_case_process = GetYamlData(file).get_yaml_data() + # print('yaml_case_process',yaml_case_process) + writePageFiles(self.get_test_class_title(file), self.func_title(file),self.lib_page_path(file), self.yaml_path(file)) + write_testcase_file( + self.markers(case_data=yaml_case_process, file_path=file_path), + allure_epic=self.allure_epic(case_data=yaml_case_process, file_path=file), + allure_feature=self.allure_feature(yaml_case_process, file_path=file), + class_title=self.get_test_class_title(file), func_title=self.func_title(file), + case_path=self.get_case_path(file)[0], + yaml_path=self.yaml_path(file), + file_name=self.get_case_path(file)[1], + allure_story=self.allure_story(case_data=yaml_case_process, file_path=file) + ) + + +if __name__ == '__main__': + TestCaseAutomaticGeneration().get_case_automatic() diff --git a/tools/excel_control.py b/tools/excel_control.py new file mode 100644 index 0000000..d4443bc --- /dev/null +++ b/tools/excel_control.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import json + +import xlrd +from xlutils.copy import copy +from config.settings import ConfigHandler + + +def get_excel_data(sheet_name: str, case_name: any) -> list: + """ + 读取 Excel 中的数据 + :param sheet_name: excel 中的 sheet 页的名称 + :param case_name: 测试用例名称 + :return: + """ + res_list = [] + + excel_dire = ConfigHandler.excel_path + 'Login.xls' + work_book = xlrd.open_workbook(excel_dire, formatting_info=True) + + # 打开对应的子表 + work_sheet = work_book.sheet_by_name(sheet_name) + # 读取一行 + idx = 0 + for one in work_sheet.col_values(0): + # 运行需要运行的测试用例 + if case_name in one: + req_body_data = work_sheet.cell(idx, 9).value + resp_data = work_sheet.cell(idx, 11).value + res_list.append((req_body_data, json.loads(resp_data))) + idx += 1 + print(res_list) + return res_list + + +def set_excel_data(sheet_index: int) -> tuple: + """ + excel 写入 + :return: + """ + excel_dir = r'..\data\Login.xls' + work_book = xlrd.open_workbook(excel_dir, formatting_info=True) + work_book_new = copy(work_book) + + work_sheet_new = work_book_new.get_sheet(sheet_index) + return work_book_new, work_sheet_new + + +if __name__ == '__main__': + get_excel_data("登录", 'Login001') + diff --git a/tools/exceltoyaml_control.py b/tools/exceltoyaml_control.py new file mode 100644 index 0000000..8d883fd --- /dev/null +++ b/tools/exceltoyaml_control.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2022/6/2 22:43 +# @Author : 郭林莉 + + +#!/usr/bin/python3 + +import xlrd +from config.settings import ConfigHandler +from ruamel.yaml import YAML +import os + +yaml = YAML() + + +def read_xls(filename): + + # 打开Excel文件 + data = xlrd.open_workbook(filename) + # 读取第一个工作表和表名 + table = data.sheets()[0] + tablesheet = data.sheet_names()[0] + # 统计行数 + rows = table.nrows + data = {} # 存放数据 + ymal_dirmctory = ConfigHandler.data_path + 
f'{tablesheet}' + + for v in range(1, rows): + values = table.row_values(v) + excel_yaml_path = str(values[10]) + print("excel_yaml_path",excel_yaml_path) + yaml_path = ymal_dirmctory+excel_yaml_path[:-2]+'.yaml' + # print('ymal_dirmctory', ymal_dirmctory) + # print('yaml_path', yaml_path) + if not os.path.exists(ymal_dirmctory): + os.mkdir(ymal_dirmctory) + if str(values[0]) == '同上': + pass + else: + data["case_common"]=eval(values[0]) + case_name = excel_yaml_path[1:] + print('名字',case_name) + yamlassert = eval(values[11]) + if values[7] == '': + data[case_name]={ + "host": str(values[1]), + "url": str(values[2]), # 这里我只需要字符型数据,加了str(),根据实际自己取舍 + "method": str(values[3]), + "detail": str(values[4]), + "headers": eval(values[5]), + "requestType": str(values[6]), + "is_run": True, + "data": eval(values[8]), + "assert": yamlassert, + 'sql': "" + } + + with open(yaml_path, mode='w', encoding='utf-8') as file: + yaml.dump(data, file) + return data + + + +dmdata = read_xls(ConfigHandler.file_path + r'\stp.xlsx') +print(dmdata) +# print(jsonpath.jsonpath(dddddd,"$..yamlpath")) + diff --git a/tools/feishu_control.py b/tools/feishu_control.py new file mode 100644 index 0000000..0b72903 --- /dev/null +++ b/tools/feishu_control.py @@ -0,0 +1,516 @@ +from json import JSONDecodeError +import requests +import json +import logging +import time +import urllib3 +import datetime +import os +import yaml.scanner +import argparse + +urllib3.disable_warnings() + +try: + JSONDecodeError = json.decoder.JSONDecodeError +except AttributeError: + JSONDecodeError = ValueError + + +class ConfigHandler: + # 项目路径 + root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + config_path = os.path.join(root_path, 'config', 'conf.yaml') + + report_path = os.path.join(root_path, 'report') + + +class AllureFileClean: + """allure 报告数据清洗,提取业务需要得数据""" + + def __init__(self): + pass + + @classmethod + def _get_al_files(cls) -> list: + """ 获取所有 test-case 中的 json 文件 """ + filename = [] + # 获取所有文件下的子文件名称 + for root, dirs, files in os.walk(ConfigHandler.report_path + '/html/data/test-cases'): + for filePath in files: + path = os.path.join(root, filePath) + filename.append(path) + return filename + + def get_test_cases(self): + """ 获取所有 allure 报告中执行用例的情况""" + # 将所有数据都收集到files中 + files = [] + for i in self._get_al_files(): + with open(i, 'r', encoding='utf-8') as fp: + date = json.load(fp) + files.append(date) + return files + + def get_failed_case(self): + """ 获取到所有失败的用例标题和用例代码路径""" + error_cases = [] + for i in self.get_test_cases(): + if i['status'] == 'failed' or i['status'] == 'broken': + error_cases.append((i['name'], i['fullName'])) + return error_cases + + def get_failed_cases_detail(self): + """ 返回所有失败的测试用例相关内容 """ + date = self.get_failed_case() + # 判断有失败用例,则返回内容 + if len(date) >= 1: + values = "失败用例:\n" + values += " **********************************\n" + for i in date: + values += " " + i[0] + ":" + i[1] + "\n" + return values + else: + # 如果没有失败用例,则返回False + return "" + + @classmethod + def get_case_count(cls): + """ 统计用例数量 """ + file_name = ConfigHandler.report_path + '/html/history/history-trend.json' + with open(file_name, 'r', encoding='utf-8') as fp: + date = json.load(fp)[0]['data'] + return date + + +class CaseCount: + def __init__(self): + self.AllureData = AllureFileClean() + + def pass_count(self): + """用例成功数""" + return self.AllureData.get_case_count()['passed'] + + def failed_count(self): + """用例失败数""" + return self.AllureData.get_case_count()['failed'] + + def broken_count(self): + 
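A quick worked example of the pass_rate() formula defined a few lines below (note that skipped cases count toward the success rate):

passed, skipped, total = 47, 3, 52
rate = round((passed + skipped) / total * 100, 2)   # -> 96.15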
"""用例异常数""" + return self.AllureData.get_case_count()['broken'] + + def skipped_count(self): + """用例跳过数""" + return self.AllureData.get_case_count()['skipped'] + + def total_count(self): + """用例总数""" + return self.AllureData.get_case_count()['total'] + + def pass_rate(self): + """用例成功率""" + # 四舍五入,保留2位小数 + try: + pass_rate = round((self.pass_count() + self.skipped_count()) / self.total_count() * 100, 2) + return pass_rate + except ZeroDivisionError: + return 0.00 + + +class GetYamlData: + + def __init__(self, file_dir): + self.fileDir = file_dir + + def get_yaml_data(self) -> dict: + """ + 获取 yaml 中的数据 + :param: fileDir: + :return: + """ + # 判断文件是否存在 + if os.path.exists(self.fileDir): + data = open(self.fileDir, 'r', encoding='utf-8') + try: + res = yaml.load(data, Loader=yaml.FullLoader) + return res + except UnicodeDecodeError: + raise ValueError(f"yaml文件编码错误,文件路径:{self.fileDir}") + + else: + raise FileNotFoundError("文件路径不存在") + + def write_yaml_data(self, key: str, value) -> int: + """ + 更改 yaml 文件中的值 + :param key: 字典的key + :param value: 写入的值 + :return: + """ + with open(self.fileDir, 'r', encoding='utf-8') as f: + # 创建了一个空列表,里面没有元素 + lines = [] + for line in f.readlines(): + if line != '\n': + lines.append(line) + f.close() + + with open(self.fileDir, 'w', encoding='utf-8') as f: + flag = 0 + for line in lines: + left_str = line.split(":")[0] + if key == left_str and '#' not in line: + newline = "{0}: {1}".format(left_str, value) + line = newline + f.write('%s\n' % line) + flag = 1 + else: + f.write('%s' % line) + f.close() + return flag + + +class Config: + '''' + 测试环境 : CI_ENVIRONMENT_SLUG + 飞书通知: webhook + GL_JOB_ID :GL_JOB_ID + test_user : GITLAB_USER_NAME + ''' + def __getattr__(self, attr): + return os.environ[attr] + + +def is_not_null_and_blank_str(content): + """ + 非空字符串 + :param content: 字符串 + :return: 非空 - True,空 - False + """ + if content and content.strip(): + return True + else: + return False + + +class FeiShuTalkChatBot(object): + """飞书机器人通知""" + def __init__(self): + self.job_id = str(Config().__getattr__("GL_JOB_ID")) + self.timeStamp = str(round(time.time() * 1000)) + self.devConfig = ConfigHandler() + # 从yaml文件中获取钉钉配置信息 + self.name = str(GetYamlData(ConfigHandler.config_path).get_yaml_data()['ProjectName']) + self.test_name = Config().__getattr__("GITLAB_USER_NAME") + try: + self.host = Config().__getattr__("DAV_ENVIRONMENT_SLUG") + except: + self.host = "默认分支环境" + Config().__getattr__("CI_ENVIRONMENT_SLUG") + self.tester = GetYamlData(ConfigHandler.config_path).get_yaml_data() + self.allure_data = CaseCount() + self.PASS = self.allure_data.pass_count() + self.FAILED = self.allure_data.failed_count() + self.BROKEN = self.allure_data.broken_count() + self.SKIP = self.allure_data.skipped_count() + self.TOTAL = self.allure_data.total_count() + self.RATE = self.allure_data.pass_rate() + self.ALL_CASE = self.PASS + self.FAILED + self.BROKEN + self.Except_case = self.BROKEN+self.FAILED + + self.headers = {'Content-Type': 'application/json; charset=utf-8'} + self.devConfig = ConfigHandler() + # self.getFeiShuTalk = GetYamlData(self.devConfig.config_path).get_yaml_data()['FeiShuTalk'] + + + def getwebhook(self): + try: + testing_evn = Config().__getattr__("DAV_ENVIRONMENT_SLUG") + if "-" in testing_evn: + list_evn = testing_evn.split("-") + cur_evn = list_evn[0] + if cur_evn == 'prod': + webhook = Config().__getattr__("webhook") + else: + webhook = Config().__getattr__("testwebhook") + else: + if testing_evn == 'prod': + webhook = Config().__getattr__("webhook") + else: + 
webhook = Config().__getattr__("testwebhook") + except: + cur_evn = Config().__getattr__("CI_ENVIRONMENT_SLUG") + if cur_evn == 'prod': + webhook = Config().__getattr__("webhook") + else: + webhook = Config().__getattr__("testwebhook") + return webhook + + def send_text(self, msg: str): + """ + 消息类型为text类型 + :param msg: 消息内容 + :return: 返回消息发送结果 + """ + data = {"msg_type": "text", "at": {}} + if is_not_null_and_blank_str(msg): # 传入msg非空 + data["content"] = {"text": msg} + else: + logging.error("text类型,消息内容不能为空!") + raise ValueError("text类型,消息内容不能为空!") + + logging.debug('text类型:%s' % data) + return self.post() + + def error_feishu(self, error_message): + """ + 发送消息(内容UTF-8编码) + :return: 返回消息发送结果 + """ + rich_text = { + "email": "fanlv@bytedance.com", + "msg_type": "post", + "content": { + "post": { + "zh_cn": { + "title": "【接口自动化执行异常通知】", + "content": [ + + [{ + "tag": "text", + "text": "接口自动化执行异常,错误如下请关注 : " + }, + { + "tag": "text", + "text": "{0}".format(error_message) + }], # 成功率 + + ] + } + } + } + } + try: + post_data = json.dumps(rich_text) + response = requests.post(self.getwebhook(), headers=self.headers, data=post_data, verify=False) + except requests.exceptions.HTTPError as exc: + logging.error("消息发送失败, HTTP error: %d, reason: %s" % (exc.response.status_code, exc.response.reason)) + raise + except requests.exceptions.ConnectionError: + logging.error("消息发送失败,HTTP connection error!") + raise + except requests.exceptions.Timeout: + logging.error("消息发送失败,Timeout error!") + raise + except requests.exceptions.RequestException: + logging.error("消息发送失败, Request Exception!") + raise + else: + try: + result = response.json() + except JSONDecodeError: + logging.error("服务器响应异常,状态码:%s,响应内容:%s" % (response.status_code, response.text)) + return {'errcode': 500, 'errmsg': '服务器响应异常'} + else: + logging.debug('发送结果:%s' % result) + # 消息发送失败提醒(errcode 不为 0,表示消息发送异常),默认不提醒,开发者可以根据返回的消息发送结果自行判断和处理 + if result.get('StatusCode') != 0: + time_now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())) + error_data = { + "msgtype": "text", + "text": { + "content": "[注意-自动通知]飞书机器人消息发送失败,时间:%s,原因:%s,请及时跟进,谢谢!" 
% ( + time_now, result['errmsg'] if result.get('errmsg', False) else '未知异常') + }, + "at": { + "isAtAll": False + } + } + logging.error("消息发送失败,自动通知:%s" % error_data) + requests.post(self.getwebhook(), headers=self.headers, data=json.dumps(error_data)) + return result + + # 判断 如果错误与异常相加大于1 @all,如果没有@寂寞 + def exce_case(self): + if self.Except_case >= 1: + return 'all' + else: + # userId c1c916dg + return '郭林莉' + + + + + def get_commit(self): + com = '' + try: + if Config().__getattr__("DAV_COMMIT_ID"): + com = f'(全量回归{Config().__getattr__("DAV_COMMIT_ID")})' + except KeyError: + com = '(巡检)' + return com + + def post(self): + """ + 发送消息(内容UTF-8编码) + :return: 返回消息发送结果 + """ + rich_text = { + "email": "fanlv@bytedance.com", + "msg_type": "post", + "content": { + "post": { + "zh_cn": { + "title": self.name+ self.get_commit(), + "content": [ + [ + { + "tag": "a", + "text": "测试报告", + "href": "https://davinci-rnd.pages.davincimotor.com/-/testing/davinci_dm_api/-/jobs/{0}/artifacts/report/pytest_html/result.html".format(self.job_id) + }, + + { + "tag": "at", + "user_id": self.exce_case() + # "text":"陈锐男" + } + ], + [ + { + "tag": "text", + "text": "测试 人员 : " + }, + { + "tag": "text", + "text": "{testname}".format(testname=self.test_name) + } + ], + [ + { + "tag": "text", + "text": "运行 环境 : " + }, + { + "tag": "text", + "text": "{host}".format(host=str(self.host)) + } + ], + [{ + "tag": "text", + "text": "成 功 率 : " + }, + { + "tag": "text", + "text": "{rate}".format(rate=self.RATE) + " %" + }], # 成功率 + [{ + "tag": "text", + "text": "总用例条数 : " + }, + { + "tag": "text", + "text": "{failed}".format(failed=self.ALL_CASE) + }], + + [{ + "tag": "text", + "text": "成功用例数 : " + }, + { + "tag": "text", + "text": "{total}".format(total=self.PASS) + }], # 成功用例数 + + [{ + "tag": "text", + "text": "失败用例数 : " + }, + { + "tag": "text", + "text": "{failed}".format(failed=self.FAILED) + }], # 失败用例数 + [{ + "tag": "text", + "text": "跳过用例数 : " + }, + { + "tag": "text", + "text": "{skip}".format(skip=self.SKIP) + }], + [{ + "tag": "text", + "text": "异常用例数 : " + }, + { + "tag": "text", + "text": "{failed}".format(failed=self.BROKEN) + }], # 损坏用例数 + [ + { + "tag": "text", + "text": "时 间 : " + }, + { + "tag": "text", + "text": "{test}".format(test=(datetime.datetime.now() + datetime.timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')) + } + ], + + # [ + # { + # "tag": "img", + # "image_key": "d640eeea-4d2f-4cb3-88d8-c964fab53987", + # "width": 300, + # "height": 300 + # } + # ] + ] + } + } + } + } + try: + post_data = json.dumps(rich_text) + response = requests.post(self.getwebhook(), headers=self.headers, data=post_data, verify=False) + except requests.exceptions.HTTPError as exc: + logging.error("消息发送失败, HTTP error: %d, reason: %s" % (exc.response.status_code, exc.response.reason)) + raise + except requests.exceptions.ConnectionError: + logging.error("消息发送失败,HTTP connection error!") + raise + except requests.exceptions.Timeout: + logging.error("消息发送失败,Timeout error!") + raise + except requests.exceptions.RequestException: + logging.error("消息发送失败, Request Exception!") + raise + else: + try: + result = response.json() + except JSONDecodeError: + logging.error("服务器响应异常,状态码:%s,响应内容:%s" % (response.status_code, response.text)) + return {'errcode': 500, 'errmsg': '服务器响应异常'} + else: + logging.debug('发送结果:%s' % result) + # 消息发送失败提醒(errcode 不为 0,表示消息发送异常),默认不提醒,开发者可以根据返回的消息发送结果自行判断和处理 + if result.get('StatusCode') != 0: + time_now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())) + error_data = { + "msgtype": "text", 
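+                 # fallback plain-text alert pushed back to the same webhook when the rich-text post is not accepted (StatusCode != 0)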
+ "text": { + "content": "[注意-自动通知]飞书机器人消息发送失败,时间:%s,原因:%s,请及时跟进,谢谢!" % ( + time_now, result['errmsg'] if result.get('errmsg', False) else '未知异常') + }, + "at": { + "isAtAll": False + } + } + logging.error("消息发送失败,自动通知:%s" % error_data) + requests.post(self.getwebhook(), headers=self.headers, data=json.dumps(error_data)) + return result + + +if __name__ == '__main__': + send = FeiShuTalkChatBot() + send.post() diff --git a/tools/get_all_files_path.py b/tools/get_all_files_path.py new file mode 100644 index 0000000..549b19f --- /dev/null +++ b/tools/get_all_files_path.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time : 2022/3/28 13:22 +# @Author : 郭林莉 +import os + + +def get_all_files(file_path, yaml_data_switch=False) -> list: + """ + 获取文件路径 + :param file_path: 目录路径 + :param yaml_data_switch: 是否过滤文件为 yaml格式, True则过滤 + :return: + """ + filename = [] + # 获取所有文件下的子文件名称 + for root, dirs, files in os.walk(file_path): + for filePath in files: + path = os.path.join(root, filePath) + if yaml_data_switch: + if 'yaml' in path or '.yml' in path: + filename.append(path) + else: + filename.append(path) + return filename + diff --git a/tools/gettime_control.py b/tools/gettime_control.py new file mode 100644 index 0000000..43ada5f --- /dev/null +++ b/tools/gettime_control.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import time +from datetime import datetime + + +def count_milliseconds(): + """ + 计算时间 + :return: + """ + access_start = datetime.now() + access_end = datetime.now() + access_delta = (access_end - access_start).seconds * 1000 + return access_delta + + +def timestamp_conversion(time_str: str) -> int: + """ + 时间戳转换,将日期格式转换成时间戳 + :param time_str: 时间 + :return: + """ + + try: + datetime_format = datetime.strptime(str(time_str), "%Y-%m-%d %H:%M:%S") + timestamp = int(time.mktime(datetime_format.timetuple()) * 1000.0 + datetime_format.microsecond / 1000.0) + return timestamp + except ValueError: + raise ValueError('日期格式错误, 需要传入得格式为 "%Y-%m-%d %H:%M:%S" ') + + +def time_conversion(time_num: int): + """ + 时间戳转换成日期 + :param time_num: + :return: + """ + if isinstance(time_num, int): + time_stamp = float(time_num / 1000) + time_array = time.localtime(time_stamp) + other_style_time = time.strftime("%Y-%m-%d %H:%M:%S", time_array) + return other_style_time + + else: + raise ValueError("请传入正确的时间戳") + + +def now_time() -> str: + """ + 获取当前时间, 日期格式: 2021-12-11 12:39:25 + :return: + """ + localtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + return localtime + + +def get_time_for_min(minute: int) -> int: + """ + 获取几分钟后的时间戳 + @param minute: 分钟 + @return: N分钟后的时间戳 + """ + return int(time.time() + 60 * minute) * 1000 + + +def get_now_time() -> int: + """ + 获取当前时间戳, 整形 + @return: 当前时间戳 + """ + return int(time.time()) * 1000 + + +if __name__ == '__main__': + print(now_time()) + time_conversion(1547450538000) diff --git a/tools/jsonpath.py b/tools/jsonpath.py new file mode 100644 index 0000000..5067d30 --- /dev/null +++ b/tools/jsonpath.py @@ -0,0 +1,353 @@ +""" +An XPath for JSON + +A port of the Perl, and JavaScript versionsmanager of JSONPath +see http://goessner.net/articles/JsonPath/ + +Based on on JavaScript versionsmanager by Stefan Goessner at: + https://goessner.net/articles/JsonPath/ + http://code.google.com/p/jsonpath/ +and Perl versionsmanager by Kate Rhodes at: + http://github.com/masukomi/jsonpath-perl/tree/master + +Python3 compatibily by Per J. 
Sandstrom +""" +from __future__ import print_function + +__author__ = "Phil Budne" +__revision__ = "$Revision: 1.17 $" +__version__ = '0.82' + +# Copyright (c) 2007 Stefan Goessner (goessner.net) +# Copyright (c) 2008 Kate Rhodes (masukomi.org) +# Copyright (c) 2008-2018 Philip Budne (ultimate.com) +# Licensed under the MIT licence: +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. + +import re +import sys + +# XXX BUGS: +# evalx is generally a crock: +# handle !@.name.name??? +# there are probably myriad unexpected ways to get an exception: +# wrap initial "trace" call in jsonpath body in a try/except?? + +# XXX TODO: +# internally keep paths as lists to preserve integer types +# (instead of as ';' delimited strings) + +__all__ = [ 'jsonpath' ] + +# XXX precompile RE objects on load??? +# re_1 = re.compile(.....) +# re_2 = re.compile(.....) + +# For python3 portability +if sys.version_info[0] == 3: + xrange = range + + +def normalize(x): + """normalize the path expression; outside jsonpath to allow testing""" + subx = [] + + # replace index/filter expressions with placeholders + # Python anonymous functions (lambdas) are cryptic, hard to debug + def f1(m): + n = len(subx) # before append + g1 = m.group(1) + subx.append(g1) + ret = "[#%d]" % n +# print("f1:", g1, ret) + return ret + x = re.sub(r"[\['](\??\(.*?\))[\]']", f1, x) + + # added the negative lookbehind -krhodes + x = re.sub(r"'?(? 
1: print("\tf03", key, loc, expr, path) + trace(s(key, expr), obj, path) + walk(loc, x, obj, path, f03) + elif loc == "..": + trace(x, obj, path) + def f04(key, loc, expr, obj, path): + if debug > 1: print("\tf04", key, loc, expr, path) + if isinstance(obj, dict): + if key in obj: + trace(s('..', expr), obj[key], s(path, key)) + else: + if key < len(obj): + trace(s('..', expr), obj[key], s(path, key)) + walk(loc, x, obj, path, f04) + elif loc == "!": + # Perl jsonpath extension: return keys + def f06(key, loc, expr, obj, path): + if isinstance(obj, dict): + trace(expr, key, path) + walk(loc, x, obj, path, f06) + elif isinstance(obj, dict) and loc in obj: + trace(x, obj[loc], s(path, loc)) + elif isinstance(obj, list) and isint(loc): + iloc = int(loc) + if debug: print("----->", iloc, len(obj)) + if len(obj) > iloc: + trace(x, obj[iloc], s(path, loc)) + else: + # [(index_expression)] + if loc.startswith("(") and loc.endswith(")"): + if debug > 1: print("index", loc) + e = evalx(loc, obj) + trace(s(e,x), obj, path) + return + + # ?(filter_expression) + if loc.startswith("?(") and loc.endswith(")"): + if debug > 1: print("filter", loc) + def f05(key, loc, expr, obj, path): + if debug > 1: print("f05", key, loc, expr, path) + if isinstance(obj, dict): + eval_result = evalx(loc, obj[key]) + else: + eval_result = evalx(loc, obj[int(key)]) + if eval_result: + trace(s(key, expr), obj, path) + + loc = loc[2:-1] + walk(loc, x, obj, path, f05) + return + + m = re.match(r'(-?[0-9]*):(-?[0-9]*):?(-?[0-9]*)$', loc) + if m: + if isinstance(obj, (dict, list)): + def max(x,y): + if x > y: + return x + return y + + def min(x,y): + if x < y: + return x + return y + + objlen = len(obj) + s0 = m.group(1) + s1 = m.group(2) + s2 = m.group(3) + + # XXX int("badstr") raises exception + start = int(s0) if s0 else 0 + end = int(s1) if s1 else objlen + step = int(s2) if s2 else 1 + + if start < 0: + start = max(0, start+objlen) + else: + start = min(objlen, start) + if end < 0: + end = max(0, end+objlen) + else: + end = min(objlen, end) + + for i in xrange(start, end, step): + trace(s(i, x), obj, path) + return + + # after (expr) & ?(expr) + if loc.find(",") >= 0: + # [index,index....] + for piece in re.split(r"'?,'?", loc): + if debug > 1: print("piece", piece) + trace(s(piece, x), obj, path) + else: + store(path, obj) + + def walk(loc, expr, obj, path, funct): + if isinstance(obj, list): + for i in xrange(0, len(obj)): + funct(i, loc, expr, obj, path) + elif isinstance(obj, dict): + for key in obj: + funct(key, loc, expr, obj, path) + + def evalx(loc, obj): + """eval expression""" + + if debug: print("evalx", loc) + + # a nod to JavaScript. doesn't work for @.name.name.length + # Write len(@.name.name) instead!!! + loc = loc.replace("@.length", "len(__obj)") + + loc = loc.replace("&&", " and ").replace("||", " or ") + + # replace !@.name with 'name' not in obj + # XXX handle !@.name.name.name.... + def notvar(m): + return "'%s' not in __obj" % m.group(1) + loc = re.sub("!@\.([a-zA-Z@_0-9-]*)", notvar, loc) + + # replace @.name.... with __obj['name'].... + # handle @.name[.name...].length + def varmatch(m): + def brackets(elts): + ret = "__obj" + for e in elts: + if isint(e): + ret += "[%s]" % e # ain't necessarily so + else: + ret += "['%s']" % e # XXX beware quotes!!!! + return ret + g1 = m.group(1) + elts = g1.split('.') + if elts[-1] == "length": + return "len(%s)" % brackets(elts[1:-1]) + return brackets(elts[1:]) + + loc = re.sub(r'(? 
== translation + # causes problems if a string contains = + + # replace @ w/ "__obj", but \@ means a literal @ + loc = re.sub(r'(?", v) + return v + + # body of jsonpath() + + # Get caller globals so eval can pick up user functions!!! + caller_globals = sys._getframe(1).f_globals + result = [] + if expr and obj: + cleaned_expr = normalize(expr) + if cleaned_expr.startswith("$;"): + cleaned_expr = cleaned_expr[2:] + + # XXX wrap this in a try?? + trace(cleaned_expr, obj, '$') + + if len(result) > 0: + return result + return False + +if __name__ == '__main__': + try: + import json # v2.6 + except ImportError: + import simplejson as json + + import sys + + # XXX take options for output format, output file, debug level + + if len(sys.argv) < 3 or len(sys.argv) > 4: + sys.stdout.write("Usage: jsonpath.py FILE PATH [OUTPUT_TYPE]\n") + sys.exit(1) + + object = json.load(file(sys.argv[1])) + path = sys.argv[2] + format = 'VALUE' + + if len(sys.argv) > 3: + # XXX verify? + format = sys.argv[3] + + value = jsonpath(object, path, format) + + if not value: + sys.exit(1) + + f = sys.stdout + json.dump(value, f, sort_keys=True, indent=1) + f.write("\n") + + sys.exit(0) diff --git a/tools/jsonpath_date_replace.py b/tools/jsonpath_date_replace.py new file mode 100644 index 0000000..36bdda8 --- /dev/null +++ b/tools/jsonpath_date_replace.py @@ -0,0 +1,14 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- + +def jsonpath_replace(change_data, key_name): + """处理jsonpath数据""" + _new_data = key_name + '' + for i in change_data: + if i == '$': + pass + elif i[0] == '[' and i[-1] == ']': + _new_data += "[" + i[1:-1] + "]" + else: + _new_data += "[" + "'" + i + "'" + "]" + return _new_data diff --git a/tools/log_control.py b/tools/log_control.py new file mode 100644 index 0000000..635b7dc --- /dev/null +++ b/tools/log_control.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import logging +from logging import handlers +import colorlog +from config.settings import ConfigHandler + + +class LogHandler(object): + # 日志级别关系映射 + level_relations = { + 'debug': logging.DEBUG, + 'info': logging.INFO, + 'warning': logging.WARNING, + 'error': logging.ERROR, + 'crit': logging.CRITICAL + } + + def __init__(self, filename, level='info', when='D', back_count=3, fmt='%(levelname)-8s%(asctime)s%(name)s:%(''filename)s:%(lineno)d %(message)s'): + # 初始化获取logger对象,日志格式里面的%(name)s 就是filename + self.logger = logging.getLogger(filename) + # 定义不同日志等级颜色 + self.log_colors_config = { + 'DEBUG': 'cyan', + 'INFO': 'green', + 'WARNING': 'yellow', + 'ERROR': 'red', + 'CRITICAL': 'red', + } + ''' + colorlog.ColoredFormatter是一个Python logging模块的格式化,用于在终端输出日志的颜色 + 日志内容格式,输出在屏幕 + asctime:日志时间 + name:日志收集器的名字 + levelname:文本形式的日志级别 + message: 用户输出的消息 + ''' + formatter = colorlog.ColoredFormatter('%(log_color)s[%(asctime)s] [%(name)s] [%(levelname)s]: %(message)s',log_colors=self.log_colors_config) + # 往屏幕上输出,设置屏幕上显示的格式 + sh = logging.StreamHandler() + sh.setFormatter(formatter) + self.logger.addHandler(sh) + + + # 设置文件日志格式和日志级别 + format_str = logging.Formatter(fmt) + self.logger.setLevel(self.level_relations.get(level)) + # 往文件里写入#指定间隔时间自动生成文件的处理器 + th = handlers.TimedRotatingFileHandler(filename=filename, when=when, backupCount=back_count, encoding='utf-8') + """ + #实例化TimedRotatingFileHandler + #interval是时间间隔,backupCount是备份文件的个数,如果超过这个个数,就会自动删除,when是间隔的时间单位,单位有以下几种: + # S 秒 + # M 分 + # H 小时、 + # D 天、 + # W 每星期(interval==0时代表星期一) + # midnight 每天凌晨 + """ + # 设置文件里写入的格式 + th.setFormatter(format_str) + # 把对象加到logger里 + 
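+         # attach the file handler as well: each record now goes to both the colored console handler (sh) and the timed rotating file handler (th)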
self.logger.addHandler(th) + self.log_path = ConfigHandler.log_path + + +INFO = LogHandler(ConfigHandler.info_log_path, level='info') +ERROR = LogHandler(ConfigHandler.error_log_path, level='error') +WARNING = LogHandler(ConfigHandler.warning_log_path, level='warning') + +if __name__ == '__main__': + INFO.logger.info("测试") + msg = '111' + + + log = LogHandler("../logs/info.log", level='debug') + log.logger.debug('debug') + log.logger.info(msg) + log.logger.warning('警告') + log.logger.error('报错') + log.logger.critical('严重') + LogHandler('../logs/error.log', level='error').logger.error('error') + # is_open = ReadIni(node='log').get_value("run") + INFO.logger.info("111") diff --git a/tools/log_decorator.py b/tools/log_decorator.py new file mode 100644 index 0000000..6c2c4a2 --- /dev/null +++ b/tools/log_decorator.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from functools import wraps +from tools.log_control import INFO, WARNING + + +def log_decorator(switch: bool): + """ + 封装日志装饰器, 打印请求信息 + :param switch: 定义日志开关 + :return: + """ + # 判断参数类型是否是 int 类型 + if isinstance(switch, bool): + def decorator(func): + @wraps(func) + def swapper(*args, **kwargs): + # 判断日志为开启状态,才打印日志 + res = func(*args, **kwargs) + # 判断日志开关为开启状态 + if switch: + if res is not None: + _is_run = res['yaml_data']['is_run'] + # 判断正常打印的日志,控制台输出绿色 + if _is_run is None or _is_run is True: + INFO.logger.info( + "\n===================================" + "==============================================\n" + "测试标题: %s\n" + "请求方式: %s\n" + "请求头: %s\n" + "请求路径: %s\n" + "请求内容: %s\n" + "接口响应内容: %s\n" + "数据库断言数据: %s\n" + "================================" + "=================================================", + res['yaml_data']['detail'], + res['yaml_data']['method'], + res['yaml_data']['headers'], + res['yaml_data']['url'], + res['yaml_data']['data'], + res['response_data'], + res['sql_data'] + ) + else: + # 跳过执行的用例,控制台输出黄色 + WARNING.logger.warning( + "\n================================" + "=================================================\n" + "该条用例跳过执行.\n" + "测试标题: %s\n" + "请求方式: %s\n" + "请求头: %s\n" + "请求路径: %s\n" + "请求内容: %s\n" + "接口响应内容: %s\n" + "数据库断言数据: %s\n" + "================================" + "=================================================", + res['yaml_data']['detail'], + res['yaml_data']['method'], + res['yaml_data']['headers'], + res['yaml_data']['url'], + res['yaml_data']['data'], + res['response_data'], + res['sql_data'] + ) + res_data = res + else: + res_data = res + return res_data + return swapper + else: + raise TypeError("日志开关只能为 Ture 或者 False") + + return decorator diff --git a/tools/mysql_control.py b/tools/mysql_control.py new file mode 100644 index 0000000..0b2a75a --- /dev/null +++ b/tools/mysql_control.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import pymysql +from warnings import filterwarnings +from tools.yaml_control import GetYamlData +from tools.log_control import ERROR +from tools.yaml_control import GetCaseData +from config.settings import ConfigHandler +from tools.regular_control import sql_regular + +# 忽略 Mysql 告警信息 +filterwarnings("ignore", category=pymysql.Warning) + +switch = GetCaseData(ConfigHandler.config_path).get_yaml_data()['MySqlDB']['switch'] + + +class MysqlDB(object): + if switch: + + def __init__(self): + self.config = GetYamlData(ConfigHandler.config_path) + self.read_mysql_config = self.config.get_yaml_data()['MySqlDB'] + + try: + # 建立数据库连接 + self.conn = pymysql.connect( + host=self.read_mysql_config['host'], + 
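+                     # connection parameters come from the MySqlDB section of conf.yaml (host / user / password / db)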
user=self.read_mysql_config['user'], + password=self.read_mysql_config['password'], + db=self.read_mysql_config['db'] + ) + + # 使用 cursor 方法获取操作游标,得到一个可以执行sql语句,并且操作结果为字典返回的游标 + self.cur = self.conn.cursor(cursor=pymysql.cursors.DictCursor) + except Exception as e: + ERROR.logger.error("数据库连接失败,失败原因{0}".format(e)) + + def __del__(self): + try: + # 关闭游标 + self.cur.close() + # 关闭连接 + self.conn.close() + except Exception as e: + ERROR.logger.error("数据库连接失败,失败原因{0}".format(e)) + + def query(self, sql, state="all"): + """ + 查询 + :param sql: + :param state: all 是默认查询全部 + :return: + """ + try: + self.cur.execute(sql) + + if state == "all": + # 查询全部 + data = self.cur.fetchall() + + else: + # 查询单条 + data = self.cur.fetchone() + + return data + except Exception as e: + ERROR.logger.error("数据库连接失败,失败原因{0}".format(e)) + + def execute(self, sql): + """ + 更新 、 删除、 新增 + :param sql: + :return: + """ + try: + # 使用 execute 操作 sql + rows = self.cur.execute(sql) + # 提交事务 + self.conn.commit() + return rows + except Exception as e: + ERROR.logger.error("数据库连接失败,失败原因{0}".format(e)) + # 如果事务异常,则回滚数据 + self.conn.rollback() + + def assert_execution(self, sql: list, resp) -> dict: + """ + 执行 sql, 负责处理 yaml 文件中的断言需要执行多条 sql 的场景,最终会将所有数据以对象形式返回 + :param resp: 接口响应数据 + :param sql: sql + :return: + """ + try: + if isinstance(sql, list): + + data = {} + if 'UPDATE' and 'update' and 'DELETE' and 'delete' and 'INSERT' and 'insert' in sql: + raise ValueError("断言的 sql 必须是查询的 sql") + else: + for i in sql: + # 判断sql中是否有正则,如果有则通过jsonpath提取相关的数据 + sql = sql_regular(i, resp) + # for 循环逐条处理断言 sql + query_data = self.query(sql)[0] + # 将sql 返回的所有内容全部放入对象中 + for key, value in query_data.items(): + data[key] = value + + return data + else: + raise ValueError("断言的查询sql需要是list类型") + except Exception as e: + ERROR.logger.error("数据库连接失败,失败原因{0}".format(e)) + raise + + +if __name__ == '__main__': + mysql_db = MysqlDB() + a = mysql_db.assert_execution(sql=[""], resp={"code": 237, "value": 1}) + print(a) diff --git a/tools/officemail_control.py b/tools/officemail_control.py new file mode 100644 index 0000000..b72d5bf --- /dev/null +++ b/tools/officemail_control.py @@ -0,0 +1,261 @@ +# !/usr/bin/env python3 +# coding: utf-8 + + +import smtplib # 加载smtplib模块 +from email.mime.text import MIMEText +from email.utils import formataddr +from email.mime.multipart import MIMEMultipart +import os +import json +import yaml.scanner +from email.mime.application import MIMEApplication +import datetime + + +class ConfigHandler: + # 项目路径 + root_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + config_path = os.path.join(root_path, 'config', 'conf.yaml') + + report_path = os.path.join(root_path, 'report') + + +class AllureFileClean: + """allure 报告数据清洗,提取业务需要得数据""" + + def __init__(self): + pass + + @classmethod + def _get_al_files(cls) -> list: + """ 获取所有 test-case 中的 json 文件 """ + filename = [] + # 获取所有文件下的子文件名称 + for root, dirs, files in os.walk(ConfigHandler.report_path + '/html/data/test-cases'): + for filePath in files: + path = os.path.join(root, filePath) + filename.append(path) + return filename + + def get_test_cases(self): + """ 获取所有 allure 报告中执行用例的情况""" + # 将所有数据都收集到files中 + files = [] + for i in self._get_al_files(): + with open(i, 'r', encoding='utf-8') as fp: + date = json.load(fp) + files.append(date) + return files + + def get_failed_case(self): + """ 获取到所有失败的用例标题和用例代码路径""" + error_cases = [] + for i in self.get_test_cases(): + if i['status'] == 'failed' or i['status'] == 'broken': + error_cases.append((i['name'], 
i['fullName'])) + return error_cases + + def get_failed_cases_detail(self): + """ 返回所有失败的测试用例相关内容 """ + date = self.get_failed_case() + # 判断有失败用例,则返回内容 + if len(date) >= 1: + values = "失败用例:\n" + values += " **********************************\n" + for i in date: + values += " " + i[0] + ":" + i[1] + "\n" + return values + else: + # 如果没有失败用例,则返回False + return "" + + @classmethod + def get_case_count(cls): + """ 统计用例数量 """ + file_name = ConfigHandler.report_path + '/html/history/history-trend.json' + with open(file_name, 'r', encoding='utf-8') as fp: + date = json.load(fp)[0]['data'] + return date + + +class GetYamlData: + + def __init__(self, file_dir): + self.fileDir = file_dir + + def get_yaml_data(self) -> dict: + """ + 获取 yaml 中的数据 + :param: fileDir: + :return: + """ + # 判断文件是否存在 + if os.path.exists(self.fileDir): + data = open(self.fileDir, 'r', encoding='utf-8') + try: + res = yaml.load(data, Loader=yaml.FullLoader) + return res + except UnicodeDecodeError: + raise ValueError(f"yaml文件编码错误,文件路径:{self.fileDir}") + + else: + raise FileNotFoundError("文件路径不存在") + + def write_yaml_data(self, key: str, value) -> int: + """ + 更改 yaml 文件中的值 + :param key: 字典的key + :param value: 写入的值 + :return: + """ + with open(self.fileDir, 'r', encoding='utf-8') as f: + # 创建了一个空列表,里面没有元素 + lines = [] + for line in f.readlines(): + if line != '\n': + lines.append(line) + f.close() + + with open(self.fileDir, 'w', encoding='utf-8') as f: + flag = 0 + for line in lines: + left_str = line.split(":")[0] + if key == left_str and '#' not in line: + newline = "{0}: {1}".format(left_str, value) + line = newline + f.write('%s\n' % line) + flag = 1 + else: + f.write('%s' % line) + f.close() + return flag + + +class CaseCount: + def __init__(self): + self.AllureData = AllureFileClean() + + def pass_count(self): + """用例成功数""" + return self.AllureData.get_case_count()['passed'] + + def failed_count(self): + """用例失败数""" + return self.AllureData.get_case_count()['failed'] + + def broken_count(self): + """用例异常数""" + return self.AllureData.get_case_count()['broken'] + + def skipped_count(self): + """用例跳过数""" + return self.AllureData.get_case_count()['skipped'] + + def total_count(self): + """用例总数""" + return self.AllureData.get_case_count()['total'] + + def pass_rate(self): + """用例成功率""" + # 四舍五入,保留2位小数 + try: + pass_rate = round((self.pass_count() + self.skipped_count()) / self.total_count() * 100, 2) + return pass_rate + except ZeroDivisionError: + return 0.00 + + +class Config: + + def __getattr__(self, attr): + return os.environ[attr] + + +class SendMail(object): + def __init__(self): + self.sys_sender = Config().__getattr__("send_user") # 系统账户 + self.sys_pwd = Config().__getattr__("send_pwd") # 系统账户密码 + self.test_name = Config().__getattr__("GITLAB_USER_NAME") + self.ProjectName = GetYamlData(ConfigHandler.config_path).get_yaml_data()['ProjectName'] + self.allure_data = CaseCount() + self.PASS = self.allure_data.pass_count() + self.FAILED = self.allure_data.failed_count() + self.BROKEN = self.allure_data.broken_count() + self.SKIP = self.allure_data.skipped_count() + self.TOTAL = self.allure_data.total_count() + self.RATE = self.allure_data.pass_rate() + self.ALL_CASE = self.PASS + self.FAILED + self.BROKEN + try: + self.host = Config().__getattr__("DAV_ENVIRONMENT_SLUG") + except: + self.host = Config().__getattr__("CI_ENVIRONMENT_SLUG") + self.job_id = str(Config().__getattr__("GL_JOB_ID")) + self.sender_list = Config().__getattr__("sender_list") + self.email_host = Config().__getattr__('email_host') + self.email_port = 
int(Config().__getattr__("email_port")) + + def contents(self): + current_time = (datetime.datetime.now() + datetime.timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S') + content = """ + 各位同事, 大家好: + 自动化用例执行完成,执行结果如下: + 测试人员 : {tester_name} + 运行环境 : {host} + 运行的成功率: {rate} % + 用例运行总数: {all_case} 个 + 通过用例个数: {total} 个 + 失败用例个数: {failed} 个 + 异常用例个数: {exception_case} 个 + 跳过用例个数: {skip_case} 个 + 用例运行时间:{run_time} + ********************************** + 报告地址:'https://davinci-rnd.pages.davincimotor.com/-/testing/davinci_dm_api/-/jobs/{job_id}/artifacts/report/pytest_html/result.html' + 系统自动发送请勿回复,详细情况可报告查看,非相关负责人员可忽略此消息。谢谢。 + PS:名词解释: + 异常用例:异常是代码还没执行到assert断言就报错了 + 失败用例:失败是代码执行assert断言失败与预期不符 + 跳过用例:不在此环境执行这个接口通常用于生产环境 + """.format(tester_name=self.test_name, host=str(self.host), rate=self.RATE, all_case=self.ALL_CASE, + total=self.PASS, failed=self.FAILED, exception_case=self.BROKEN, skip_case=self.SKIP, + run_time=current_time, job_id=self.job_id) + return content + + def send(self): + """ + 发送邮件 + :return: bool + """ + try: + # 创建一个带附件的实例 + msg = MIMEMultipart() + # 发件人格式 + msg['From'] = formataddr(["", self.sys_sender]) + # 收件人格式 + sender = self.sender_list + msg['To'] = formataddr(["", sender]) + # 邮件主题 title + msg['Subject'] = str(self.ProjectName) + "(全量回归)" + + # 邮件正文内容 + msg.attach(MIMEText(SendMail().contents(), 'plain', 'utf-8')) + + # SMTP服务器 + server = smtplib.SMTP(self.email_host, int(self.email_port), timeout=10) + server.ehlo() + # TLS加密 + server.starttls() + # 登录账户 + server.login(self.sys_sender, self.sys_pwd) + # 发送邮件 + server.sendmail(self.sys_sender, sender.split(','), msg.as_string()) + # 退出账户 + server.quit() + return True + except Exception as e: + raise e + + +if __name__ == '__main__': + SendMail().send() \ No newline at end of file diff --git a/tools/regular_control.py b/tools/regular_control.py new file mode 100644 index 0000000..955544e --- /dev/null +++ b/tools/regular_control.py @@ -0,0 +1,134 @@ +import re,os +from faker import Faker +from tools.log_control import ERROR +import jsonpath +from tools.cache_control import Cache +from config.configs import Config +import time + + +class MyMetaClass(type): + def __getattr__(self, item): + return os.environ[item] + + +class MyClass(metaclass=MyMetaClass): + pass + + +class Context: + def __init__(self): + self.f = Faker(locale='zh_CN') + + @property + def get_host11(self): + #判断环境,testing,staging,prod, testing-global, staging-global,.0 + + return Config().get_host11() + + + @property + def get_female_name(self) -> str: + return self.f.name()+ str(self.get_now_time) + + @property + def get_female_phone(self) -> str: + return self.f.phone_number() + + @property + def get_password(self): + return Config().__getattr__("dm_password") + + @property + def get_now_time(self): + now = time.time() + return int(round(now * 1000)) + + + @property + def insert_vin(self): + return "zidh"+ str(self.get_now_time) + + +def regular(target) -> str: + """ + 使用正则替换请求数据 + target:yaml的用例数据 + :return: + """ + try: + # 匹配规则 + regular_pattern = r'\${{(.*?)}}' + while re.findall(regular_pattern, target): + # group(1) 列出第一个括号匹配部分,host + key = re.search(regular_pattern, target).group(1) + # print("str(getattr(Context(), key))",str(getattr(Context(), key))) + # str(getattr(Context(), key)),返回 :https://tsp.testing.davincimotor.com + try: + target = re.sub(regular_pattern, str(getattr(MyClass, key)), target, 1) + except KeyError: + target = re.sub(regular_pattern, str(getattr(Context(), key)), target, 1) + # return target + return target + except 
AttributeError: + ERROR.logger.error('未找到对应的替换数据,请检查数据是否存在', target) + raise + + +def sql_json(js_path, res): + return jsonpath.jsonpath(res, js_path)[0] + + +def sql_regular(value, res=None): + """ + 这里处理sql中的依赖数据,通过获取接口响应的jsonpath的值进行替换 + :param res: jsonpath使用的返回结果 + :param value: + :return: + """ + sql_json_list = re.findall(r"\$json\((.*?)\)\$", value) + + for i in sql_json_list: + pattern = re.compile(r'\$json\(' + i.replace('$', "\$").replace('[', '\[') + r'\)\$') + key = str(sql_json(i, res)) + value = re.sub(pattern, key, value, count=1) + value = sql_regular(value, res) + + return value + + + +def cache_regular(value): + """ + 通过正则的方式,读取缓存中的内容 + 例:$cache{login_init} + :param value: + :return: + """ + # 正则获取 $cache{login_init}中的值 --> login_init + regular_dates = re.findall(r"\$cache\{(.*?)\}", value) + + # 拿到的是一个list,循环数据 + for regular_data in regular_dates: + value_types = ['int:', 'bool:', 'list:', 'dict:', 'tuple:', 'float:'] + if any(i in regular_data for i in value_types) is True: + + value_types = regular_data.split(":")[0] + pattern = re.compile(r'\'\$cache{' + regular_data+ r'(.*?)}\'') + regular_data = regular_data.split(":")[1] + + else: + pattern = re.compile(r'\$cache\{' + regular_data.replace('$', "\$").replace('[', '\[') + r'\}') + cache_data = Cache(regular_data).get_cache().replace(r'\n','').replace(r'\u2005', '') + value = re.sub(pattern, cache_data, value) + return value + +if __name__ == '__main__': + # res = regular("${{get_host11}}") + # print(res) + # aa = '$cache{login_token}' + # ress = cache_regular(aa) + # print(ress) + + a = Context().get_host11() + print(a) diff --git a/tools/report/pytest_html/result.html b/tools/report/pytest_html/result.html new file mode 100644 index 0000000..3586681 --- /dev/null +++ b/tools/report/pytest_html/result.html @@ -0,0 +1,484 @@ + + + + + Test Report + + + +

+ [result.html: pytest-html v3.1.1 report generated 2022-09-01 17:24:05. Environment: CI_ENVIRONMENT_SLUG=testing, JAVA_HOME=C:\Program Files\Java\jdk1.8.0_20, Python 3.6.6, Platform Windows-10-10.0.19041-SP0, plugins Faker / allure-pytest / forked / html / metadata / xdist. Summary: 2 test cases in 0.06 s, 2 passed, 0 skipped, 0 failed, 0 errors. Results: tools/cro.py::test passed (0.04 s, captured stdout "调用次数: 1"), tools/cro.py::test2 passed (0.00 s, captured stdout "调用次数: 2").]
\ No newline at end of file diff --git a/tools/request_control.py b/tools/request_control.py new file mode 100644 index 0000000..fa7a305 --- /dev/null +++ b/tools/request_control.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + +import requests +from tools.runtime_control import execution_duration +from tools.mysql_control import MysqlDB +from tools.log_decorator import log_decorator +from tools.allure_control import allure_step, allure_step_no, SqlSwitch,allure_attach +from tools.regular_control import cache_regular +from config.settings import ConfigHandler +from requests_toolbelt import MultipartEncoder +from typing import Any +import random +import os + +class Transmission: + JSON: str = "JSON" + PARAMS: str = "params" + DATE: str = "date" + FILE: str = 'file' + + +class RequestControl: + """ 封装请求 """ + + def __init__(self): + # TODO 初始化逻辑调整 + pass + + @classmethod + def _check_params(cls, response, yaml_data, headers, cookie, res_time, status_code, teardown,assert_data) -> Any: + """ 抽离出通用模块,判断 http_request 方法中的一些数据校验 """ + # 判断数据库开关,开启状态,则返回对应的数据 + if SqlSwitch() and yaml_data['sql'] is not None: + sql_data = MysqlDB().assert_execution(sql=yaml_data['sql'], resp=response) + return {"response_data": response, "sql_data": sql_data, "yaml_data": yaml_data, + "headers": headers, "cookie": cookie, "res_time": res_time, "status_code": status_code, + "teardown": teardown,"assert": assert_data} + else: + # 数据库关闭走的逻辑 + res = response + return {"response_data": res, "sql_data": {"sql": None}, "yaml_data": yaml_data, + "headers": headers, "cookie": cookie, "res_time": res_time, "status_code": status_code, + "teardown": teardown,"assert": assert_data} + + @classmethod + def response_elapsed_total_seconds(cls, res): + """获取接口响应时长""" + return res.elapsed.total_seconds() + + @classmethod + def text_encode(cls, text): + """unicode 解码""" + return text.encode("utf-8").decode("utf-8") + + @classmethod + def file_data_exit(cls, yaml_data, file_data): + """判断上传文件时,data参数是否存在""" + # 兼容又要上传文件,又要上传其他类型参数 + try: + for key, value in yaml_data['data']['data'].items(): + file_data[key] = value + except KeyError: + pass + + @classmethod + def multipart_data(cls, file_data): + multipart = MultipartEncoder( + fields=file_data, # 字典格式 + boundary='-----------------------------' + str(random.randint(int(1e28), int(1e29 - 1))) + ) + return multipart + + @classmethod + def file_prams_exit(cls, yaml_data, multipart): + # 判断上传文件接口,文件参数是否存在 + try: + params = yaml_data['data']['params'] + except KeyError: + params = None + return multipart, params + + def upload_file(self, yaml_data): + """ + 判断处理上传文件 + :param yaml_data: + :return: + """ + # 处理上传多个文件的情况 + yaml_data = eval(cache_regular(str(yaml_data))) + _files = [] + file_data = {} + # 兼容又要上传文件,又要上传其他类型参数 + self.file_data_exit(yaml_data, file_data) + for key, value in yaml_data['data']['file'].items(): + file_path = os.path.join(ConfigHandler.file_path,value) + file_data[key] = (value, open(file_path, 'rb'), 'application/octet-stream') + _files.append(file_data) + # allure中展示该附件 + allure_attach(source=file_path, name=value, extension=value) + multipart = self.multipart_data(file_data) + yaml_data['headers']['Content-Type'] = multipart.content_type + # yaml_data, multipart = self.file_prams_exit(yaml_data, multipart) + return yaml_data, multipart + + @log_decorator(True) + @execution_duration(3000) + # @encryption("md5") + def http_request(self, yaml_data, **kwargs): + """ + 请求封装 + :param yaml_data: 从yaml文件中读取出来的所有数据 + :param dependent_switch: + 
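+         (note: yaml_data is expected to carry the keys is_run / url / method / detail / headers / requestType / data / sql / assert / teardown, i.e. the dict produced by tools.yaml_data_analysis.CaseData.case_process)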
:param kwargs: + :return: + """ + _is_run = yaml_data['is_run'] + _url = yaml_data['url'] + _method = yaml_data['method'] + _detail = yaml_data['detail'] + _headers = yaml_data['headers'] + _requestType = yaml_data['requestType'].upper() + _data = yaml_data['data'] + _sql = yaml_data['sql'] + _teardown = yaml_data["teardown"] + # _teardown_sql = yaml_data[ 'teardown_sql'] + res = None + yaml_data = eval(cache_regular(str(yaml_data))) + _assert = yaml_data['assert'] + # 判断用例是否执行 + if _is_run is True or _is_run is None: + # 处理多业务逻辑 + # print("_requestType",_requestType) + # print(_requestType == "JSON") + if _requestType == "JSON": + #缓存里面正则替换 + yaml_data = eval(cache_regular(str(yaml_data))) + # print("djskfhsfhksh",yaml_data) + # print(yaml_data['data']) + _data = yaml_data['data'] + res = requests.request(method=_method, url=_url, json=_data,headers=yaml_data['headers'], verify=False, **kwargs) + elif _requestType == "PARAMS": + yaml_data = eval(cache_regular(str(yaml_data))) + _data = yaml_data['data'] + url = yaml_data[ 'url'] + if _data is not None: + # url 拼接的方式传参 + params_data = "?" + for k, v in _data.items(): + params_data += (k + "=" + str(v) + "&") + url = yaml_data[ 'url'] + params_data[:-1] + + res = requests.request(method=_method, url=url, headers=yaml_data['headers'], verify=False, **kwargs) + # 判断上传文件 + elif _requestType == 'FILE': + yaml_data = eval(cache_regular(str(yaml_data))) + multipart = self.upload_file(yaml_data) + res = requests.request(method=_method, url=yaml_data['url'], data=multipart[1], headers=multipart[0]['headers'], verify=False, **kwargs) + + elif _requestType == "DATE": + yaml_data = eval(cache_regular(str(yaml_data))) + res = requests.request(method=_method, url=yaml_data['url'], data=_data, headers=yaml_data['headers'], verify=False, **kwargs) + + + _status_code = res.status_code + allure_step_no(f"请求URL: {yaml_data['url']}") + allure_step_no(f"请求方式: {_method}") + allure_step("请求头: ", _headers) + allure_step("请求数据: ", _data) + allure_step("预期数据: ", _assert) + _res_time = self.response_elapsed_total_seconds(res) + allure_step_no(f"响应耗时(s): {_res_time}") + try: + res = res.json() + allure_step("响应结果111: ", res) + except: + res = self.text_encode(res.text) + allure_step("响应结果222: ", res) + try: + cookie = res.cookies.get_dict() + except: + cookie = None + return self._check_params(res, yaml_data, _headers, cookie, _res_time, _status_code, _teardown,_assert) + # return self._check_params(res, yaml_data, _headers, cookie, _res_time,_status_code) + else: + # 用例跳过执行的话,响应数据和sql数据为空 + return {"response_data": False, "sql_data": False, "yaml_data": yaml_data, "res_time": 0.00} + diff --git a/tools/runtime_control.py b/tools/runtime_control.py new file mode 100644 index 0000000..506a9c2 --- /dev/null +++ b/tools/runtime_control.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import time +from tools.log_control import ERROR + + +def execution_duration(number: int): + """ + 封装统计函数执行时间装饰器 + :param number: 函数预计运行时长 + :return: + """ + # 判断参数类型是否是 int 类型 + if isinstance(number, int): + def decorator(func): + def swapper(*args, **kwargs): + # 定义开始时间 + start_time = int(round(time.time() * 1000)) + res = func(*args, **kwargs) + end_time = int(round(time.time() * 1000)) + run_time = end_time - start_time + # 计算时间戳毫米级别,如果时间大于number,则打印 函数名称 和运行时间 + if run_time > number: + ERROR.logger.error( + "\n=================================================================================\n" + "测试用例执行时间较长,请关注.\n" + "函数运行时间:{0} ms\n" + "测试用例相关数据: {1}\n" + 
"=================================================================================" + .format(run_time, func(*args, **kwargs))) + + return res + + return swapper + + return decorator + else: + raise TypeError("参数类型不正确") diff --git a/tools/sendmail_control.py b/tools/sendmail_control.py new file mode 100644 index 0000000..bf36d01 --- /dev/null +++ b/tools/sendmail_control.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + + + +import smtplib +from email.mime.text import MIMEText +from tools.yaml_control import GetYamlData +from config.settings import ConfigHandler +from tools.allure_report_control import CaseCount, AllureFileClean + + +class SendEmail(object): + def __init__(self): + self.getData = GetYamlData(ConfigHandler.config_path).get_yaml_data()['email'] + self.send_user = self.getData['send_user'] # 发件人 + self.email_host = self.getData['email_host'] # QQ 邮件 STAMP 服务器地址 + self.key = self.getData['stmp_key'] # STAMP 授权码 + self.name = GetYamlData(ConfigHandler.config_path).get_yaml_data()['ProjectName'][0] + self.allureData = CaseCount() + self.PASS = self.allureData.pass_count() + self.FAILED = self.allureData.failed_count() + self.BROKEN = self.allureData.broken_count() + self.SKIP = self.allureData.skipped_count() + self.TOTAL = self.allureData.total_count() + self.RATE = self.allureData.pass_rate() + self.CaseDetail = AllureFileClean().get_failed_cases_detail() + + def send_mail(self, user_list: list, sub, content): + """ + + @param user_list: 发件人邮箱 + @param sub: + @param content: 发送内容 + @return: + """ + user = "郭林莉" + "<" + self.send_user + ">" + message = MIMEText(content, _subtype='plain', _charset='utf-8') + message['Subject'] = sub + message['From'] = user + message['To'] = ";".join(user_list) + server = smtplib.SMTP() + server.connect(self.email_host) + server.login(self.send_user, self.key) + server.sendmail(user, user_list, message.as_string()) + server.close() + + def error_mail(self, error_message): + """ + 执行异常邮件通知 + @param error_message: 报错信息 + @return: + """ + emali = self.getData['send_list'] + user_list = emali.split(',') # 多个邮箱发送,config文件中直接添加 '806029174@qq.com' + + sub = self.name + "接口自动化执行异常通知" + content = "自动化测试执行完毕,程序中发现异常,请悉知。报错信息如下:\n{0}".format(error_message) + self.send_mail(user_list, sub, content) + + def send_main(self): + """ + 发送邮件 + :return: + """ + + emali = self.getData["send_list"] + user_list = emali.split(',') # 多个邮箱发送,yaml文件中直接添加 '806029174@qq.com' + + sub = self.name + "接口自动化报告" + content = """ + 各位同事, 大家好: + 自动化用例执行完成,执行结果如下: + 用例运行总数: {} 个 + 通过用例个数: {} 个 + 失败用例个数: {} 个 + 异常用例个数: {} 个 + 跳过用例个数: {} 个 + 成 功 率: {} % + + {} + + ********************************** + jenkins地址:https://121.xx.xx.47:8989/login + 详细情况可登录jenkins平台查看,非相关负责人员可忽略此消息。谢谢。 + """.format(self.TOTAL, self.PASS, self.FAILED, self.BROKEN, self.SKIP, self.RATE, self.CaseDetail) + + self.send_mail(user_list, sub, content) + + +if __name__ == '__main__': + SendEmail().send_main() diff --git a/tools/teardown_control.py b/tools/teardown_control.py new file mode 100644 index 0000000..278ddd8 --- /dev/null +++ b/tools/teardown_control.py @@ -0,0 +1,147 @@ +from tools.jsonpath import jsonpath +from tools.request_control import RequestControl +from tools.regular_control import cache_regular, sql_regular, regular +from tools.jsonpath_date_replace import jsonpath_replace +from tools.mysql_control import MysqlDB +from tools.assert_control import SqlSwitch +from tools.log_control import WARNING +from tools.cache_control import Cache + + +class TearDownHandler: + """ 
处理yaml格式后置请求 """ + + @classmethod + def get_teardown_data(cls, case_data): + return case_data["teardown"] + + @classmethod + def get_response_data(cls, case_data): + return case_data['response_data'] + + @classmethod + def get_teardown_sql(cls, case_data): + return case_data['teardown_sql'] + + @classmethod + def jsonpath_replace_data(cls, replace_key, replace_value): + # 通过jsonpath判断出需要替换数据的位置 + _change_data = replace_key.split(".") + # jsonpath 数据解析 + _new_data = jsonpath_replace(change_data=_change_data, key_name='_teardown_case') + # 最终提取到的数据,转换成 _teardown_case[xxx][xxx] + _new_data += ' = {0}'.format(replace_value) + return _new_data + + @classmethod + def get_cache_name(cls, replace_key, resp_case_data): + """ + 获取缓存名称,并且讲提取到的数据写入缓存 + """ + if "$set_cache{" in replace_key and "}" in replace_key: + start_index = replace_key.index("$set_cache{") + end_index = replace_key.index("}", start_index) + old_value = replace_key[start_index:end_index + 2] + cache_name = old_value[11:old_value.index("}")] + Cache(cache_name).set_caches(resp_case_data) + + @classmethod + def regular_testcase(cls, teardown_case): + """处理测试用例中的动态数据""" + test_case = regular(str(teardown_case)) + test_case = eval(cache_regular(str(test_case))) + return test_case + + @classmethod + def teardown_http_requests(cls, teardown_case): + """发送后置请求""" + test_case = cls.regular_testcase(teardown_case) + res = RequestControl().http_request(yaml_data=test_case, dependent_switch=False) + return res + + def teardown_handle(self, case_data): + """ 后置处理逻辑 """ + # 拿到用例信息 + case_data = eval(cache_regular(str(case_data))) + _teardown_data = self.get_teardown_data(case_data) + # 获取接口的响应内容 + _resp_data = case_data['response_data'] + # 获取接口的请求参数 + _request_data = case_data['yaml_data']['data'] + # print("生成的名字存到缓存",_request_data) + # 判断如果没有 teardown + if _teardown_data is not None: + # 循环 teardown中的接口 + for _data in _teardown_data: + + _step = _data['step'] + # print('_step', _step) + # _teardown_case = eval(Cache('case_process').get_cache())[_case_id] + # res = self.teardown_http_requests(_teardown_case) + for i in _step: + # 判断请求类型为自己 + if i['dependent_type'] == 'self_response': + _set_value = i['set_value'] + _response_dependent = jsonpath(obj=_resp_data, expr=i['jsonpath']) + # 如果提取到数据,则进行下一步 + if _response_dependent is not False: + _resp_case_data = _response_dependent[0] + # 拿到 set_cache 然后将数据写入缓存 + + Cache(_set_value).set_caches(_resp_case_data) + self.get_cache_name(replace_key=_set_value, resp_case_data=_resp_case_data) + else: + raise ValueError(f"jsonpath提取失败,替换内容: {_resp_data} \n" + f"jsonpath: {i['jsonpath']}") + + # 判断从响应内容提取数据 + if i['dependent_type'] == 'response': + _resp_set_value = i['set_value'] + _response_dependent = jsonpath(obj=_resp_data, expr=i['jsonpath']) + # print("_request_dependentS是",_resp_data,_response_dependent) + + # 如果提取到数据,则进行下一步 + if _response_dependent is not False: + _resp_case_data = _response_dependent[0] + self.get_cache_name(replace_key=_resp_set_value, resp_case_data=_resp_case_data) + else: + raise ValueError(f"jsonpath提取失败,替换内容: {_resp_data}" f"jsonpath: {i['jsonpath']}") + # 判断请求中的数据 + elif i['dependent_type'] == 'request': + _request_set_value = i['set_value'] + _request_dependent = jsonpath(obj=_request_data, expr=i['jsonpath']) + if _request_dependent is not False: + _request_case_data = _request_dependent[0] + # print("真正存缓存的数据%s"%i['set_value'], _request_case_data) + self.get_cache_name(replace_key=_request_set_value, resp_case_data=_request_case_data) + else: + raise 
ValueError(f"jsonpath提取失败,替换内容: {_request_data} \n" + f"jsonpath: {i['jsonpath']}") + + elif i['dependent_type'] == 'cache': + + _cache_data = Cache(i['cache_data']).get_cache() + # _cache_data = eval(cache_regular(str(i['cache_data']))) + _replace_key = i['replace_key'] + exec(self.jsonpath_replace_data(replace_key=_replace_key, replace_value=_cache_data)) + # + # self.teardown_sql(case_data) + # test_case = self.regular_testcase(_teardown_case) + # # res = self.teardown_http_requests(test_case) + # Assert(test_case['assert']).assert_equality(response_data=res['response_data'], + # sql_data=res['sql_data']) + + def teardown_sql(self, case_data): + """处理后置sql""" + sql_data = self.get_teardown_sql(case_data) + _response_data = case_data['response_data'] + if sql_data is not None: + for i in sql_data: + if SqlSwitch(): + _sql_data = sql_regular(value=i, res=_response_data) + # print(_sql_data) + MysqlDB().execute(_sql_data) + else: + WARNING.logger.warning(f"程序中检查到您数据库开关为关闭状态,已为您跳过删除sql: {i}") + + diff --git a/tools/testcase_template.py b/tools/testcase_template.py new file mode 100644 index 0000000..475a9c7 --- /dev/null +++ b/tools/testcase_template.py @@ -0,0 +1,118 @@ +import datetime +import os +from tools.yaml_control import GetYamlData +from config.settings import ConfigHandler + + +def write_case(case_path, page): + with open(case_path, 'w', encoding="utf-8") as f: + f.write(page) + +def writePageFiles(classTitle, funcTitle, casePath, yamlPath): + """ + 自动写成 py 文件 + :param yamlPath:yaml的路径 + :param casePath: 生成的py文件地址 + :param classTitle: 类名称, 读取用例中的 caseTitle 作为类名称 + :param funcTitle: 函数名称 caseTitle,首字母小写 + :param caseDetail: 函数描述,读取用例中的描述内容,做为函数描述 + :return: + """ + Author = GetYamlData(ConfigHandler.config_path).get_yaml_data()['TestName'] + now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + + page = f'''#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from tools.request_control import RequestControl +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.regular_control import regular +import os +from tools.teardown_control import TearDownHandler + +class {classTitle}(object): + @staticmethod + def {funcTitle}(inData): + """ + :param inData: + :return: + """ + res = RequestControl().http_request(eval(inData)) + TearDownHandler().teardown_handle(res) + return res + + +if __name__ == '__main__': + + TestData = CaseData(os.path.join(ConfigHandler.data_path,'{yamlPath}')).case_process()[0] + re_data = regular(str(TestData)) + data = {classTitle}().{funcTitle}(re_data) + print(data) + ''' + with open(casePath, 'w', encoding="utf-8") as f: + f.write(page) + +def write_testcase_file(markers,allure_epic, allure_feature, class_title,func_title, case_path, yaml_path, file_name, allure_story): + """ + + :param allure_story: + :param file_name: 文件名称 + :param allure_epic: 项目名称 + :param allure_feature: 模块名称 + :param class_title: 类名称 + :param func_title: 函数名称 + :param case_path: case 路径 + :param yaml_path: yaml 文件路径 + :return: + """ + conf_data = GetYamlData(ConfigHandler.config_path).get_yaml_data() + author = conf_data['TestName'] + now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + real_time_update_test_cases = conf_data['real_time_update_test_cases'] + + page = f'''#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import allure +import pytest +from config.settings import ConfigHandler +from tools.yaml_data_analysis import CaseData +from tools.assert_control import Assert +from tools.request_control import RequestControl 
+from tools.regular_control import regular +from tools.teardown_control import TearDownHandler +import os + +TestData = CaseData(os.path.join(ConfigHandler.data_path,'{yaml_path}')).case_process() +re_data = regular(str(TestData)) + +{markers} +@allure.epic("{allure_epic}") +@allure.feature("{allure_feature}") +class Test{class_title}: + + @allure.story("{allure_story}") + @pytest.mark.parametrize('in_data', eval(re_data), ids=[i['detail'] for i in TestData]) + def test_{func_title}(self, in_data, case_skip): + """ + :param : + :return: + """ + + res = RequestControl().http_request(in_data) + TearDownHandler().teardown_handle(res) + Assert(res['assert']).assert_equality(response_data=res['response_data'], + sql_data=res['sql_data']) + + +if __name__ == '__main__': + pytest.main(['{file_name}', '-s', '-W', 'ignore:Module already imported:pytest.PytestWarning']) +''' + if real_time_update_test_cases: + write_case(case_path=case_path, page=page) + elif real_time_update_test_cases is False: + if not os.path.exists(case_path): + write_case(case_path=case_path, page=page) + else: + raise ValueError("real_time_update_test_cases 配置不正确,只能配置 True 或者 False") diff --git a/tools/yaml_control.py b/tools/yaml_control.py new file mode 100644 index 0000000..7dfa12e --- /dev/null +++ b/tools/yaml_control.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import yaml.scanner,yaml.parser +import os +from tools.regular_control import regular + + +class GetYamlData: + + def __init__(self, file_dir): + self.fileDir = file_dir + + + def get_yaml_data(self) -> dict: + """ + 获取 yaml 中的数据 + :param: fileDir: + :return: + """ + # 判断文件是否存在 + if os.path.exists(self.fileDir): + data = open(self.fileDir, 'r', encoding='utf-8') + try: + res = yaml.load(data, Loader=yaml.FullLoader) + return res + except UnicodeDecodeError: + raise ValueError(f"yaml文件编码错误,文件路径:{self.fileDir}") + + else: + raise FileNotFoundError("文件路径不存在") + + def write_yaml_data(self, key: str, value) -> int: + """ + 更改 yaml 文件中的值 + :param key: 字典的key + :param value: 写入的值 + :return: + """ + with open(self.fileDir, 'r', encoding='utf-8') as f: + # 创建了一个空列表,里面没有元素 + lines = [] + for line in f.readlines(): + if line != '\n': + lines.append(line) + f.close() + + with open(self.fileDir, 'w', encoding='utf-8') as f: + flag = 0 + for line in lines: + left_str = line.split(":")[0] + if key == left_str and '#' not in line: + newline = "{0}: {1}".format(left_str, value) + line = newline + f.write('%s\n' % line) + flag = 1 + else: + f.write('%s' % line) + f.close() + return flag + + +class GetCaseData(GetYamlData): + + def get_different_formats_yaml_data(self) -> list: + """ + 获取用例的名字 + :return: + """ + res_list = [] + for i in self.get_yaml_data(): + res_list.append(i) + return res_list + + def get_yaml_case_data(self): + """ + 获取测试用例数据, 转换成指定数据格式 + :return: + """ + + try: + _yaml_data = self.get_yaml_data() + # 正则处理yaml文件中的数据 + re_data = regular(str(_yaml_data)) + return eval(re_data) + except yaml.parser.ParserError as e: + raise yaml.parser.ParserError("yaml格式不正确, 请检查下方对应路径中的文件内容 {0}".format(e)) + except yaml.scanner.ScannerError as e: + raise yaml.scanner.ScannerError("yaml格式不正确, 请检查下方对应路径中的文件内容 {0}".format(e)) + + + +if __name__ == '__main__': + from config.settings import ConfigHandler + print(ConfigHandler.data_path + r'\dmsystem\user\adduser.yaml') + TestData2 = GetCaseData(ConfigHandler.data_path + r'\dmsystem\user\adduser.yaml').get_yaml_data() + print(TestData2) + + TestData1 = GetCaseData(ConfigHandler.data_path + 
r'\dmsystem\user\adduser.yaml').get_different_formats_yaml_data() + print(TestData1) + TestData = GetCaseData(ConfigHandler.data_path + r'\dmsystem\user\adduser.yaml').get_yaml_case_data() + print(TestData) diff --git a/tools/yaml_data_analysis.py b/tools/yaml_data_analysis.py new file mode 100644 index 0000000..bc8e3b3 --- /dev/null +++ b/tools/yaml_data_analysis.py @@ -0,0 +1,275 @@ +from tools.allure_control import SqlSwitch +from tools.yaml_control import GetYamlData + + +class CaseData: + """ + yaml 数据解析, 判断数据填写是否符合规范 + """ + + def __init__(self, file_path): + self.filePath = file_path + + def case_process(self, case_id_switch=None): + """ + 数据清洗之后,返回该 yaml 文件中的所有用例 + :param case_id_switch: 判断数据清洗,是否需要清洗出 case_id, 主要用于兼容用例池中的数据 + :return: + """ + dates = GetYamlData(self.filePath).get_yaml_data() + case_lists = [] + for key, values in dates.items(): + # print(key,values) + # 公共配置中的数据,与用例数据不同,需要单独处理 + if key != 'case_common': + case_date = { + 'method': self.get_case_method(case_id=key, case_data=values), + 'url': self.get_case_host(case_id=key, case_data=values), + 'detail': self.get_case_detail(case_id=key, case_data=values), + 'headers': self.get_headers(case_id=key, case_data=values, file_path=self.filePath), + 'is_run': self.get_is_run(key, values), + 'requestType': self.get_request_type(key, values), + 'data': self.get_case_dates(key, values), + "sql": self.get_sql(key, values), + "assert": self.get_assert(key, values), + "setup_sql": self.setup_sql(values), + "teardown": self.tear_down(values), + "teardown_sql": self.teardown_sql(values) + } + + if case_id_switch is True: + case_lists.append({key: case_date}) + else: + # 正则处理,如果用例中有需要读取缓存中的数据,则优先读取缓存 + + case_lists.append(case_date) + return case_lists + + def get_case_host(self, case_id: str, case_data: dict) -> str: + """ + 获取用例的 host + :return: + """ + try: + _url = case_data['url'] + _host = case_data['host'] + if _url is None or _host is None: + raise ValueError(f"用例中的 url 或者 host 不能为空!\n 用例ID: {case_id} \n 用例路径: {self.filePath}") + else: + return _host + _url + except KeyError: + raise KeyError(self.raise_value_null_error(data_name="url 或 host", case_id=case_id)) + + def get_case_method(self, case_id: str, case_data: dict) -> str: + """ + 获取用例的请求方式:GET/POST/PUT/DELETE + :return: + """ + try: + _case_method = case_data['method'] + _request_method = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTION'] + if _case_method.upper() in _request_method: + return _case_method.upper() + else: + raise ValueError(f"method 目前只支持 {_request_method} 请求方式,如需新增请联系管理员. " + f"{self.raise_value_error(data_name='请求方式', case_id=case_id, detail=_case_method)}") + + except AttributeError: + raise ValueError(f"method 目前只支持 { ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTION']} 请求方式," + f"如需新增请联系管理员! 
" + f"{self.raise_value_error(data_name='请求方式', case_id=case_id, detail=case_data['method'])}") + except KeyError: + raise KeyError(self.raise_value_null_error(data_name="method", case_id=case_id)) + + def get_case_detail(self, case_id, case_data: dict) -> str: + """ + 获取用例描述 + :return: + """ + try: + return case_data['detail'] + except KeyError: + raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="detail")) + + def get_headers(self, case_id: str, case_data: dict, file_path: str) -> dict: + """ + 胡求用例请求头中的信息 + :return: + """ + try: + _header = case_data['headers'] + return _header + except KeyError: + raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="headers")) + + def raise_value_error(self, data_name: str, case_id: str, detail: [str, list, dict]): + """ + 所有用例填写不规范的异常提示 + :param data_name: 参数名称 + :param case_id: 用例ID + :param detail: 参数内容 + :return: + """ + detail = f"用例中的 {data_name} 填写不正确!\n 用例ID: {case_id} \n 用例路径: {self.filePath}\n" \ + f"当前填写的内容: {detail}" + + return detail + + def raise_value_null_error(self, data_name: str, case_id: str): + """ + 用例中参数名称为空的异常提示 + :param data_name: 参数名称 + :param case_id: 用例ID + :return: + """ + detail = f"用例中未找到 {data_name} 参数, 如已填写,请检查用例缩进是否存在问题" \ + f"用例ID: {case_id} " \ + f"用例路径: {self.filePath}" + + return detail + + def get_request_type(self, case_id: str, case_data: dict) -> str: + """ + 获取请求类型,params、data、json + :return: + """ + + _types = ['JSON', 'PARAMS', 'FILE', 'DATE'] + try: + _request_type = case_data['requestType'] + # 判断用户填写的 requestType是否符合规范 + if _request_type.upper() in _types: + return _request_type.upper() + else: + raise ValueError(self.raise_value_error(data_name='requestType', case_id=case_id, detail=_request_type)) + # 异常捕捉 + except AttributeError: + raise ValueError(self.raise_value_error(data_name='requestType', + case_id=case_id, detail=case_data['requestType'])) + except KeyError: + raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="requestType")) + + def get_is_run(self, case_id: str, case_data: dict) -> str: + """ + 获取执行状态, 为 true 或者 None 都会执行 + :return: + """ + try: + return case_data['is_run'] + except KeyError: + raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="is_run")) + + def get_dependence_case(self, case_id: str, case_data: dict) -> dict: + """ + 获取是否依赖的用例 + :return: + """ + try: + _dependence_case = case_data['dependence_case'] + return _dependence_case + except KeyError: + raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="dependence_case")) + + def get_dependence_case_data(self, case_id: str, case_data: dict) -> dict: + """ + 获取依赖的用例 + :return: + """ + # 判断如果该用例有依赖,则返回依赖数据,否则返回None + if self.get_dependence_case(case_id=case_id, case_data=case_data): + try: + _dependence_case_data = case_data['dependence_case_data'] + # 判断当用例中设置的需要依赖用例,但是dependence_case_data下方没有填写依赖的数据,异常提示 + if _dependence_case_data is None: + raise ValueError(f"dependence_case_data 依赖数据中缺少依赖相关数据!" 
+                                     f" If it is present, check the yaml indentation.\n"
+                                     f" case id: {case_id}\n"
+                                     f" case path: {self.filePath}")
+
+                return _dependence_case_data
+            except KeyError:
+                raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="dependence_case_data"))
+        else:
+            return {"dependence_case_data": None}
+
+    def get_case_dates(self, case_id: str, case_data: dict) -> dict:
+        """
+        Get the request data of the case.
+        :param case_id:
+        :param case_data:
+        :return:
+        """
+        try:
+            _dates = case_data['data']
+            return _dates
+        except KeyError:
+            raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="data"))
+
+    def get_assert(self, case_id: str, case_data: dict):
+        """
+        Get the data that needs to be asserted.
+        :return:
+        """
+        try:
+            _assert = case_data['assert']
+            if _assert is None:
+                raise ValueError(self.raise_value_error(data_name="assert", case_id=case_id, detail=_assert))
+            return case_data['assert']
+        except KeyError:
+            raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="assert"))
+
+    def get_sql(self, case_id: str, case_data: dict):
+        """
+        Get the assertion sql of the test case.
+        :return:
+        """
+        try:
+            _sql = case_data['sql']
+            # Only return the sql when the database switch is on and the sql is not empty
+            if SqlSwitch() and _sql is not None:
+                return case_data['sql']
+            else:
+                return None
+        except KeyError:
+            raise KeyError(self.raise_value_null_error(case_id=case_id, data_name="sql"))
+
+    @classmethod
+    def setup_sql(cls, case_data: dict):
+        """
+        Get the setup sql; fill in setup_sql when the case needs to read data
+        from the database and use it as a case parameter.
+        :return:
+        """
+        try:
+            _setup_sql = case_data['setup_sql']
+            return _setup_sql
+        except KeyError:
+            return None
+
+    @classmethod
+    def tear_down(cls, case_data: dict):
+        """
+        Get the teardown request data.
+        """
+        try:
+            _teardown = case_data['teardown']
+            return _teardown
+        except KeyError:
+            return None
+
+    @classmethod
+    def teardown_sql(cls, case_data: dict):
+        """
+        Get the teardown sql that is executed after the case has run.
+        :return:
+        """
+        try:
+            _teardown_sql = case_data['teardown_sql']
+            return _teardown_sql
+        except KeyError:
+            return None
+
+
+if __name__ == '__main__':
+    from config.settings import ConfigHandler
+    aa = CaseData(ConfigHandler.data_path + r'\dmsystem\user\adduser.yaml').case_process()
+    print(aa)
\ No newline at end of file
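A minimal sketch of how the CaseData output is typically consumed, assuming an illustrative yaml path (the field names below are the keys assembled in case_process()):

    from tools.yaml_data_analysis import CaseData

    # case_process() returns one dict per case with the keys built above:
    # method, url, detail, headers, is_run, requestType, data, sql, assert,
    # setup_sql, teardown, teardown_sql
    for case in CaseData('data/demo/add_user.yaml').case_process():   # hypothetical path
        print(case['method'], case['url'], case['detail'])

    # With case_id_switch=True each entry is wrapped as {case_id: case_data},
    # which is the shape used for the case pool data
    cases_with_ids = CaseData('data/demo/add_user.yaml').case_process(case_id_switch=True)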