fengdexin 1 месяц назад
Родитель
Commit
15da45824b
5 измененных файлов с 52 добавлено и 19622 удалено
  1. 1 1
      .idea/Hang_KG.iml
  2. 36 58
      api/actions.py
  3. 15 7
      api/test.py
  4. 0 19556
      data/排气原始数据.csv
  5. BIN
      data/排气原始数据.xlsx

+ 1 - 1
.idea/Hang_KG.iml

@@ -2,7 +2,7 @@
 <module type="PYTHON_MODULE" version="4">
   <component name="NewModuleRootManager">
     <content url="file://$MODULE_DIR$" />
-    <orderEntry type="inheritedJdk" />
+    <orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>

+ 36 - 58
api/actions.py

@@ -1,6 +1,10 @@
 from pathlib import Path
 import json
 import pandas as pd
+import requests
+from io import StringIO
+from flask import Flask, jsonify, request
+app = Flask(__name__)
 
 # 获取当前文件的父目录的父目录
 current_dir = Path(__file__).parent
@@ -16,63 +20,22 @@ headers = config['mutationFailures']
 
 
 # 逻辑判断函数
-def check_abnormal_headers(file_path):
-    # 读取Excel文件(默认读取第一个sheet)
-    df = pd.read_csv(file_path)
-
+def check_abnormal_headers(dataframe):
     abnormal_headers = []
-
     headers = config['logicalFailures']
 
     # 遍历所有列(表头)
     for header in headers:
         # 检查该列是否存在至少一个1
-        if (df[header] == 1).any():
+        if (dataframe[header] == 1).any():
             abnormal_headers.append(header)
 
     return abnormal_headers
 
 
 # 突发故障判断逻辑
-# def analyze_excel_data(file_path):
-#     """根据配置的区间规则分析 Excel 数据变化量"""
-#     df = pd.read_excel(file_path)
-#     results = []
-#     headers = config['mutationFailures']
-#     for columns in headers:
-#         column_name = columns["column"]
-#         normal_range = columns["normalRange"]
-#         degradation_range = columns['degradationInterval']
-#
-#         # 提取区间边界
-#         normal_upper = normal_range[1]
-#         degradation_upper = degradation_range[1]
-#
-#         # 数据预处理
-#         col_data = pd.to_numeric(df[column_name], errors="coerce").dropna()
-#
-#         # 至少需要2个数据点才能计算变化量
-#         if len(col_data) < 2:
-#             continue
-#
-#         # 计算相邻数据绝对变化量
-#         changes = col_data.diff().dropna().abs()
-#         max_change = changes.max()
-#
-#         # 动态判断逻辑
-#         if max_change > degradation_upper:
-#             results.append(f"{column_name}突变故障")
-#         elif max_change > normal_upper:
-#             # 获取实际变化方向(正/负)
-#             trend_value = col_data.diff().dropna().loc[changes.idxmax()]
-#             trend = "偏大" if trend_value > 0 else "偏小"
-#             results.append(f"{column_name}{trend}")
-#
-#     return sorted(results)
-
-# 突发故障判断逻辑
-def analyze_excel_data(file_path):
-    df = pd.read_csv(file_path)
+def analyze_excel_data(dataframe):
+    # df = pd.read_csv(csv_data, index_col=False, encoding='UTF-8')
     results = []
     faults = []              # 故障
     degradations = []         # 退化
@@ -84,13 +47,13 @@ def analyze_excel_data(file_path):
         normal_range = column_rule["normalRange"]
         degradation_range = column_rule["degradationInterval"]
 
-        normal_lower = normal_range[0]  # 正常区间下限(如0)
-        normal_upper = normal_range[1]  # 正常区间上限(如5)
-        degradation_lower = degradation_range[0]  # 退化区间上限(如10)
+        normal_lower = normal_range[0]  # 正常区间下限
+        normal_upper = normal_range[1]  # 正常区间上限
+        degradation_lower = degradation_range[0]  # 退化区间上限
         degradation_upper = degradation_range[1]
 
         # 数据预处理:转换为数值并去除非数值/空值
-        col_data = pd.to_numeric(df[column_name], errors="coerce").dropna()
+        col_data = pd.to_numeric(dataframe[column_name], errors="coerce").dropna()
 
         # 跳过无有效数据的列
         if len(col_data) == 0:
@@ -114,22 +77,37 @@ def analyze_excel_data(file_path):
         # 偏小:在退化下限和正常下限之间
         if ((col_data >= degradation_lower) & (col_data < normal_lower)).any():
             results.append(f"{column_name}偏小")
-            degradations.append(column_name)
+            degradations.append(f"{column_name}偏小")
 
     fault_result = {
             "results": results,
-            "故障": faults,
-            "退化": degradations
+            "fault": faults,
+            "degradation": degradations
         }
 
     return fault_result
 
 
 if __name__ == '__main__':
-    data_path = "../data/排气原始数据.csv"
-    result = check_abnormal_headers(data_path)
-    resa = analyze_excel_data(data_path)
-    print(result)
-    print(resa)
-
+    data_url = "http://169.254.117.72:9090/profile/upload/2025/05/16/排气原始数据_20250516140130A009.csv"
+    response = requests.get(data_url)
+    response.encoding = 'UTF-8'
+    csv_data = StringIO(response.text)
+    dataframe = pd.read_csv(csv_data, index_col=False, encoding='UTF-8')
+    result = check_abnormal_headers(dataframe)
+    resa = analyze_excel_data(dataframe)
+    fault = resa["fault"]
+    for i in fault:
+        result.append(i)
+
+    results = {
+        "code": 200,
+        "msg": "操作成功",
+        "data": {
+            "score": 58,
+            "fault": result,
+            "degradation": resa["degradation"]
+        }
+    }
+    print(results)
 

+ 15 - 7
api/test.py

@@ -1,13 +1,21 @@
 import pandas as pd
-
+import requests
 import csv
+from io import StringIO
 
 path = "../data/排气原始数据.csv"
 # 打开CSV文件
-# with open(path, 'r', newline='', encoding='utf-8') as file:
-#     reader = csv.reader(file)
-#     for row in reader:
-#         print(row)
+with open(path, 'r', newline='', encoding='utf-8') as file:
+    reader = csv.reader(file)
+    # for row in reader:
+        # print(row)
+
+# data = pd.read_csv(path)
+# print(data["发动机油门角度"])
 
-data = pd.read_csv(path)
-print(data["发动机油门角度"])
+data_url = "http://169.254.117.72:9090/profile/upload/2025/05/16/排气原始数据_20250516140130A009.csv"
+response = requests.get(data_url)
+response.encoding = 'UTF-8'
+csv_data = StringIO(response.text)
+dataframe = pd.read_csv(csv_data, index_col=False, encoding='UTF-8')
+print(dataframe)

Разница между файлами не показана из-за своего большого размера
+ 0 - 19556
data/排气原始数据.csv


BIN
data/排气原始数据.xlsx


Некоторые файлы не были показаны из-за большого количества измененных файлов