|
@@ -0,0 +1,148 @@
|
|
|
+from pathlib import Path
|
|
|
+import json
|
|
|
+import pandas as pd
|
|
|
+import requests
|
|
|
+from io import StringIO
|
|
|
+from fastapi import FastAPI, HTTPException
|
|
|
+from pydantic import BaseModel
|
|
|
+from models import AnalysisRequest
|
|
|
+import uvicorn
|
|
|
+
|
|
|
app = FastAPI(title="飞参判读API")

# Resolve the config file relative to this source file:
# <parent-of-this-dir>/config/config.json
current_dir = Path(__file__).parent
json_path = current_dir.parent / "config" / "config.json"

# Load the JSON configuration once at import time.
with open(json_path, "r", encoding="utf-8") as f:
    config = json.load(f)

# NOTE(review): this module-level `headers` appears unused — both analysis
# functions below read `config` themselves. Kept as-is for compatibility.
headers = config['mutationFailures']
|
|
|
+
|
|
|
+# 故障分数公式为 60 - 60 * A(A)为系数
|
|
|
+
|
|
|
+
|
|
|
+# 逻辑判断函数
|
|
|
# Logical-failure check: flag configured columns that report a fault.
def check_abnormal_headers(dataframe, headers=None):
    """Return the logical-failure columns that contain at least one 1.

    Args:
        dataframe: Parsed flight-parameter data (one column per signal).
        headers: Column names to inspect. Defaults to the
            ``logicalFailures`` list from the loaded config (backward
            compatible with the original behavior).

    Returns:
        list[str]: Headers whose column has at least one row equal to 1.
    """
    if headers is None:
        headers = config['logicalFailures']

    abnormal_headers = []
    for header in headers:
        # Skip configured columns the CSV does not actually contain.
        # Previously this raised KeyError, which the endpoint reported as a
        # misleading "config field missing" 500.
        if header not in dataframe.columns:
            continue
        # A single 1 anywhere in the column marks it abnormal.
        if (dataframe[header] == 1).any():
            abnormal_headers.append(header)

    return abnormal_headers
|
|
|
+
|
|
|
+
|
|
|
+# 突发故障判断逻辑
|
|
|
# Sudden-fault / mutation-fault / degradation analysis over numeric columns.
def analyze_excel_data(dataframe, rules=None):
    """Analyze numeric columns against their configured ranges.

    For each rule the checks apply in priority order (first hit wins):
      1. Sudden fault: any value outside the degradation interval.
      2. Mutation fault: two adjacent values both inside the degradation
         interval whose absolute difference exceeds ``variable``.
      3. Degradation: any value between the normal range and the
         degradation interval (too high, then too low).

    Args:
        dataframe: Parsed flight-parameter data.
        rules: List of rule dicts with keys ``column``, ``normalRange``,
            ``degradationInterval`` and ``variable``. Defaults to the
            ``mutationFailures`` list from the loaded config (backward
            compatible with the original behavior).

    Returns:
        dict: {"results": [...], "fault": [...], "degradation": [...]}
    """
    if rules is None:
        rules = config['mutationFailures']

    results = []
    faults = []          # sudden / mutation faults
    degradations = []    # performance degradation

    for column_rule in rules:
        column_name = column_rule["column"]
        normal_lower, normal_upper = column_rule["normalRange"][0], column_rule["normalRange"][1]
        degradation_lower, degradation_upper = (
            column_rule["degradationInterval"][0],
            column_rule["degradationInterval"][1],
        )
        variable = column_rule["variable"]

        # Skip rules whose column is absent from the CSV instead of raising
        # KeyError (which the endpoint reported as a misleading 500).
        if column_name not in dataframe.columns:
            continue

        # Coerce to numeric, dropping unparsable cells.
        col_data = pd.to_numeric(dataframe[column_name], errors="coerce").dropna()
        if len(col_data) == 0:
            continue

        # 1) Sudden fault: value outside the degradation interval.
        if ((col_data > degradation_upper) | (col_data < degradation_lower)).any():
            results.append(f"{column_name}突发故障")
            # Fix: this previously appended "突变故障" while `results` said
            # "突发故障" for the same event — keep the labels consistent.
            faults.append(f"{column_name}突发故障")
            continue

        # 2) Mutation fault: adjacent in-band values jumping by more than
        #    `variable` (needs at least two data points).
        is_fault_detected = False
        for i in range(len(col_data) - 1):
            current_val = col_data.iloc[i]
            next_val = col_data.iloc[i + 1]
            # Both neighbors must lie inside the degradation interval.
            if (degradation_lower <= current_val <= degradation_upper
                    and degradation_lower <= next_val <= degradation_upper):
                if abs(next_val - current_val) > variable:
                    results.append(f"{column_name}突变故障")
                    faults.append(f"{column_name}突变故障")
                    is_fault_detected = True
                    break  # stop at the first detected jump
        if is_fault_detected:
            continue  # skip the degradation checks

        # 3a) Degradation, too high: above normal but within the interval.
        if ((col_data > normal_upper) & (col_data <= degradation_upper)).any():
            results.append(f"{column_name}偏大")
            degradations.append(column_name)
            continue

        # 3b) Degradation, too low: below normal but within the interval.
        if ((col_data >= degradation_lower) & (col_data < normal_lower)).any():
            results.append(f"{column_name}偏小")
            # Fix: this previously appended "<column>偏小" while the too-high
            # branch appended the bare column name — keep them consistent.
            degradations.append(column_name)

    return {
        "results": results,
        "fault": faults,
        "degradation": degradations,
    }
|
|
|
+
|
|
|
+
|
|
|
@app.post("/process/faultDiagnosis", summary="分析发动机健康状态")
async def analyze_engine_health(request: AnalysisRequest):
    """
    Analyze engine health from a CSV data URL. Returns:
    - health score
    - fault list
    - performance-degradation indicators

    Raises (as HTTP errors):
    - 400 when the CSV cannot be fetched (network error or timeout)
    - 422 when the fetched body is not parseable CSV
    - 500 when a configured field is missing
    """
    try:
        # Fetch and parse the data. A timeout is required here: without one
        # a stalled upstream would hang this worker indefinitely. Timeouts
        # raise requests.Timeout, a RequestException subclass, so they map
        # to the 400 handler below.
        response = requests.get(request.data_url, timeout=30)
        response.encoding = 'UTF-8'
        csv_data = StringIO(response.text)
        dataframe = pd.read_csv(csv_data, index_col=False)

        # Run both analyses.
        header_issues = check_abnormal_headers(dataframe)
        analysis_result = analyze_excel_data(dataframe)

        # Merge logical-failure columns with sudden/mutation faults.
        combined_faults = header_issues + analysis_result["fault"]

        return {
            "code": 200,
            "msg": "操作成功",
            "data": {
                "score": 58,  # TODO: implement the real scoring formula (60 - 60 * A)
                "fault": combined_faults,
                "degradation": analysis_result["degradation"]
            }
        }
    except requests.RequestException as e:
        raise HTTPException(status_code=400, detail=f"数据获取失败: {str(e)}")
    except pd.errors.ParserError:
        raise HTTPException(status_code=422, detail="CSV 数据解析失败")
    except KeyError as e:
        raise HTTPException(status_code=500, detail=f"配置字段缺失: {str(e)}")
|
|
|
+
|
|
|
+
|
|
|
if __name__ == "__main__":
    # Launch the API with uvicorn on all interfaces, port 8848.
    uvicorn.run(app, host="0.0.0.0", port=8848)
|