test2.py

from pathlib import Path
import json
import pandas as pd
import requests
from io import StringIO
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from models import AnalysisRequest
import uvicorn
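
# NOTE: AnalysisRequest comes from the local models.py (not shown here); judging by the
# `request.url` access in the endpoint below, it is assumed to be a Pydantic model with
# at least a `url: str` field.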

app = FastAPI(title="飞参判读API")  # "Flight parameter interpretation API"

# Resolve config.json from the parent of this file's directory
current_dir = Path(__file__).parent
json_path = current_dir.parent / "config" / "config.json"

# Read the JSON config
with open(json_path, "r", encoding="utf-8") as f:
    config = json.load(f)

headers = config['mutationFailures']
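
# Assumed shape of config/config.json, inferred from how `config` is accessed in this
# file; the real file may contain more fields:
# {
#     "logicalFailures": ["<flag column name>", "..."],
#     "mutationFailures": [
#         {
#             "column": "<data column name>",
#             "normalRange": [<normal lower>, <normal upper>],
#             "degradationInterval": [<degradation lower>, <degradation upper>],
#             "variable": <max allowed jump between adjacent samples>
#         }
#     ]
# }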

# Fault score formula: 60 - 60 * A, where A is a coefficient
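

# Minimal sketch of the documented formula, assuming A is a fault coefficient supplied
# by the (not yet implemented) scoring logic; this helper is hypothetical and is not
# called by the endpoint below, which still returns a fixed score.
def compute_fault_score(coefficient_a):
    """Hypothetical helper: fault score = 60 - 60 * A."""
    return 60 - 60 * coefficient_a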


# Logical-failure check
def check_abnormal_headers(dataframe):
    abnormal_headers = []
    headers = config['logicalFailures']
    # Iterate over the configured flag columns (headers)
    for header in headers:
        # Flag the column if it contains at least one 1
        if (dataframe[header] == 1).any():
            abnormal_headers.append(header)
    return abnormal_headers


# Sudden-fault (mutation) detection logic
def analyze_excel_data(dataframe):
    results = []
    faults = []        # fault list
    degradations = []  # degradation list
    headers = config['mutationFailures']
    for column_rule in headers:
        column_name = column_rule["column"]
        normal_range = column_rule["normalRange"]
        degradation_range = column_rule["degradationInterval"]
        variable = column_rule["variable"]
        normal_lower, normal_upper = normal_range[0], normal_range[1]
        degradation_lower, degradation_upper = degradation_range[0], degradation_range[1]
        # Data preprocessing: coerce to numeric and drop non-numeric values
        col_data = pd.to_numeric(dataframe[column_name], errors="coerce").dropna()
        if len(col_data) == 0:
            continue
        # Sudden-fault check (original logic): any value outside the degradation interval
        if ((col_data > degradation_upper) | (col_data < degradation_lower)).any():
            results.append(f"{column_name}突发故障")  # sudden fault
            faults.append(f"{column_name}突变故障")   # mutation fault
            continue
        # New: adjacent-value change detection ----------------------------------------
        is_fault_detected = False
        # Iterate over adjacent data points (needs at least 2 samples)
        for i in range(len(col_data) - 1):
            current_val = col_data.iloc[i]
            next_val = col_data.iloc[i + 1]
            # Check whether both values lie within the degradation interval
            if (current_val >= degradation_lower) and (current_val <= degradation_upper) \
                    and (next_val >= degradation_lower) and (next_val <= degradation_upper):
                # Absolute change between the adjacent values
                delta = abs(next_val - current_val)
                if delta > variable:
                    results.append(f"{column_name}突变故障")  # mutation fault
                    faults.append(f"{column_name}突变故障")
                    is_fault_detected = True
                    break  # stop the loop as soon as a fault is found
        if is_fault_detected:
            continue  # skip the remaining checks for this column
        # -----------------------------------------------------------
        # Original degradation checks
        if ((col_data > normal_upper) & (col_data <= degradation_upper)).any():
            results.append(f"{column_name}偏大")  # too high
            degradations.append(column_name)
            continue
        if ((col_data >= degradation_lower) & (col_data < normal_lower)).any():
            results.append(f"{column_name}偏小")  # too low
            degradations.append(f"{column_name}偏小")
    fault_result = {
        "results": results,
        "fault": faults,
        "degradation": degradations
    }
    return fault_result


@app.post("/process/faultDiagnosis", summary="分析发动机健康状态")  # "Analyse engine health status"
async def analyze_engine_health(request: AnalysisRequest):
    """
    Analyse the engine state from a CSV data URL and return:
    - health score
    - fault list
    - performance degradation indicators
    """
    try:
        # Fetch and parse the data
        response = requests.get(request.url)
        response.encoding = 'UTF-8'
        csv_data = StringIO(response.text)
        dataframe = pd.read_csv(csv_data, index_col=False)
        # Run the analyses
        header_issues = check_abnormal_headers(dataframe)
        analysis_result = analyze_excel_data(dataframe)
        # Merge the results
        combined_faults = header_issues + analysis_result["fault"]
        return {
            "code": 200,
            "msg": "操作成功",  # "operation successful"
            "data": {
                "score": 58,  # actual scoring logic still to be implemented
                "fault": combined_faults,
                "degradation": analysis_result["degradation"]
            }
        }
    except requests.RequestException as e:
        raise HTTPException(status_code=400, detail=f"数据获取失败: {str(e)}")  # data fetch failed
    except pd.errors.ParserError as e:
        raise HTTPException(status_code=422, detail="CSV 数据解析失败")  # CSV parsing failed
    except KeyError as e:
        raise HTTPException(status_code=500, detail=f"配置字段缺失: {str(e)}")  # missing config field
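

# Example client call, assuming the service runs locally on port 8848 and <csv-url>
# points to a reachable CSV file (hypothetical URL):
#   import requests
#   resp = requests.post(
#       "http://localhost:8848/process/faultDiagnosis",
#       json={"url": "<csv-url>"},
#   )
#   print(resp.json())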

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8848)