View Source

1. Integrate Hadoop.
2. Integrate the agile-assurance module with Hadoop: all file uploads, downloads, and saves now go through Hadoop methods.
3. Change how the algorithm's url parameter is obtained.

Gaokun Wang 6 months ago
parent
commit
d156e20e37

+ 39 - 16
als-common/common-hadoop/src/main/java/org/eco/common/hadoop/hdfs/HadoopClient.java

@@ -1,27 +1,18 @@
 package org.eco.common.hadoop.hdfs;
 
+import cn.hutool.core.util.CharsetUtil;
 import jakarta.annotation.PostConstruct;
 import lombok.AllArgsConstructor;
 import lombok.Cleanup;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.IOUtils;
 import org.eco.common.hadoop.config.properties.HadoopProperties;
 import org.springframework.stereotype.Component;
-import org.springframework.stereotype.Service;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -225,7 +216,7 @@ public class HadoopClient {
      *
      * @param filePath file path
      */
-    public String readFile(String filePath) {
+    public String readFileContent(String filePath) {
         log.info("[Read file content] start reading, file path: {}", filePath);
         Path newPath = new Path(filePath);
         InputStream in = null;
@@ -235,7 +226,7 @@ public class HadoopClient {
             in = fileSystem.open(newPath);
             String line; // holds each line as it is read
             // set the character encoding to avoid garbled Chinese text
-            reader = new BufferedReader(new InputStreamReader(in, "GBK"));
+            reader = new BufferedReader(new InputStreamReader(in, CharsetUtil.CHARSET_UTF_8));
             // read the first line
             line = reader.readLine();
             // a null line means the file has been fully read
@@ -262,6 +253,40 @@ public class HadoopClient {
         return buffer.toString();
     }
 
+    /**
+     * Read a file as an input stream
+     *
+     * @param filePath file path
+     */
+    public InputStream readFileInputStream(String filePath) {
+        log.info("[Read file stream] start reading, file path: {}", filePath);
+        Path newPath = new Path(filePath);
+        try {
+            return fileSystem.open(newPath);
+        } catch (IOException e) {
+            log.error("readFileInputStream exception: {}", e.getMessage());
+        }
+        return null;
+    }
+
+    /**
+     * Read a file from HDFS into a local temporary file
+     *
+     * @param filePath file path
+     * @return local temporary copy of the file, or null on failure
+     */
+    public File readFile(String filePath) {
+        log.info("[Read file] start reading, file path: {}", filePath);
+        Path newPath = new Path(filePath);
+        try (InputStream inputStream = fileSystem.open(newPath)) {
+            // copy the HDFS stream into a local temp file and return it
+            File file = File.createTempFile("hdfs_", "_" + newPath.getName());
+            try (OutputStream out = new FileOutputStream(file)) {
+                IOUtils.copyBytes(inputStream, out, 4096, false);
+            }
+            return file;
+        } catch (IOException e) {
+            log.error("readFile exception: {}", e.getMessage());
+        }
+        return null;
+    }
+
     /**
      * 文件或文件夹重命名
      *
@@ -306,6 +331,4 @@ public class HadoopClient {
             log.error("copyFile 异常:{}", e.getMessage());
         }
     }
-
-
 }
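
Taken together, the client now exposes three read styles: readFileContent for whole-file text, readFileInputStream for streaming, and readFile for a local copy. A minimal usage sketch, assuming a Spring-managed HadoopClient bean (the class name and HDFS path below are hypothetical):

import jakarta.annotation.Resource;
import java.io.IOException;
import java.io.InputStream;
import org.eco.common.hadoop.hdfs.HadoopClient;
import org.springframework.stereotype.Component;

@Component
public class HadoopReadExample {

    @Resource
    private HadoopClient hadoopClient;

    public void readExamples() throws IOException {
        // hypothetical path under the configured hadoop.directoryPath
        String path = "/uploadPath/2025/01/01/demo.csv";

        // whole file decoded as UTF-8, suited to small text/CSV files
        String content = hadoopClient.readFileContent(path);
        System.out.println(content.length());

        // streaming variant; readFileInputStream returns null on failure,
        // so guard before use
        InputStream in = hadoopClient.readFileInputStream(path);
        if (in != null) {
            try (in) {
                // consume the stream, e.g. feed it to an Excel/CSV reader
            }
        }
    }
}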

+ 4 - 5
als-modules/agile-assurance/src/main/java/org/eco/als/controller/WarningController.java

@@ -16,13 +16,12 @@ import org.eco.als.service.IDataImportService;
 import org.eco.als.service.IJudgeFaultLogicService;
 import org.eco.als.service.IWarningService;
 import org.eco.als.utils.CsvUtils;
-import org.eco.common.core.constant.Constants;
 import org.eco.common.core.core.domain.CommonResult;
 import org.eco.common.core.core.domain.model.LoginUser;
 import org.eco.common.core.core.page.PageResult;
-import org.eco.common.core.utils.StringUtils;
 import org.eco.common.excel.core.ExcelResult;
 import org.eco.common.excel.utils.ExcelUtil;
+import org.eco.common.hadoop.hdfs.HadoopClient;
 import org.eco.common.log.annotation.Log;
 import org.eco.common.log.enums.BusinessType;
 import org.eco.common.security.utils.LoginHelper;
@@ -59,6 +58,8 @@ public class WarningController extends BaseController {
     private ISysOssService sysOssService;
     @Resource
     private IJudgeFaultLogicService judgeFaultLogicService;
+    @Resource
+    private HadoopClient hadoopClient;
 
     /**
      * Query the warning information list
@@ -168,8 +169,6 @@ public class WarningController extends BaseController {
         String columnData = judgeFaultLogicVo.getParameterColumn();
         DataImportVo dataImportVo = dataImportService.selectBySortieNo(sortieNo);
         SysOssVo ossVo = sysOssService.getById(dataImportVo.getOssId());
-        // database resource path
-        String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
-        return CommonResult.success(String.valueOf(CsvUtils.getPlaybackByHeaders(path, Arrays.asList(columnData.split(",")), null)), "");
+        return CommonResult.success(String.valueOf(CsvUtils.getPlaybackByHeaders(hadoopClient.readFileContent(ossVo.getUrl()), Arrays.asList(columnData.split(",")), null)), "");
     }
 }

+ 18 - 20
als-modules/agile-assurance/src/main/java/org/eco/als/service/impl/AlgorithmService.java

@@ -17,11 +17,10 @@ import org.eco.als.domain.vo.ModelVo;
 import org.eco.als.service.*;
 import org.eco.als.utils.CsvUtils;
 import org.eco.common.core.config.EcoConfig;
-import org.eco.common.core.constant.Constants;
 import org.eco.common.core.exception.BusinessException;
 import org.eco.common.core.utils.HttpUtils;
-import org.eco.common.core.utils.StringUtils;
 import org.eco.common.core.utils.file.FileUtils;
+import org.eco.common.hadoop.hdfs.HadoopClient;
 import org.eco.system.domain.vo.SysOssVo;
 import org.eco.system.service.ISysOssService;
 import org.springframework.beans.factory.annotation.Value;
@@ -72,10 +71,15 @@ public class AlgorithmService implements IAlgorithmService {
     private ITaskService taskService;
     @Resource
     private IQaHistoryService qaHistoryService;
+    @Resource
+    private HadoopClient hadoopClient;
 
     @Value("${kgqa.ask-url}")
     private String askUrl;
 
+    @Value("${oss.download.url}")
+    private String url;
+
     private static ModelHttpVo sendHttp(ModelVo modelVo, Map<String, Object> map) {
         try {
             String result = HttpUtils.postJson(modelVo.getUrl(), map);
@@ -122,12 +126,13 @@ public class AlgorithmService implements IAlgorithmService {
         PreProcessing preProcessing = processingService.insert(processingBo);
         // parameters
         Map<String, Object> map = StrUtil.isBlank(algorithmBo.getParam()) ? new HashMap<>() : new HashMap<>(JSONUtil.parseObj(algorithmBo.getParam()));
-        map.put("url", sysOssVo.getUrl());
+        map.put("url", url + sysOssVo.getOssId());
         // request
         ModelHttpVo httpVo = sendHttp(modelVo, map);
         if (httpVo != null && httpVo.getStatus() == 200) {
             File resultFile = CsvUtils.jsonToFileCsv(httpVo.getData(), CsvUtils.extractFilename("model_result"));
-            SysOssVo sysOssVo2 = ossService.uploadHadoop(resultFile);
+            assert resultFile != null;
+            SysOssVo sysOssVo2 = ossService.uploadHadoop(resultFile, resultFile.getName());
             processingBo.setOssId(sysOssVo2.getOssId());
             processingBo.setStatus("1");
             processingBo.setVersion(preProcessing.getVersion());
@@ -181,7 +186,7 @@ public class AlgorithmService implements IAlgorithmService {
         });
         // parameters
         Map<String, Object> map = StrUtil.isBlank(algorithmBo.getParam()) ? new HashMap<>() : new HashMap<>(JSONUtil.parseObj(algorithmBo.getParam()));
-        map.put("url", sysOssVo.getUrl());
+        map.put("url", url + sysOssVo.getOssId());
         map.put("faultCode", warning.getCode());
         // request
         ModelHttpVo httpVo = sendHttp(modelVo, map);
@@ -239,7 +244,7 @@ public class AlgorithmService implements IAlgorithmService {
         FaultDiagnosis faultDiagnosis = faultDiagnosisService.insert(diagnosisBo);
         // parameters
         Map<String, Object> map = StrUtil.isBlank(algorithmBo.getParam()) ? new HashMap<>() : new HashMap<>(JSONUtil.parseObj(algorithmBo.getParam()));
-        map.put("url", sysOssVo.getUrl());
+        map.put("url", url + sysOssVo.getOssId());
         // request
         ModelHttpVo httpVo = sendHttp(modelVo, map);
         if (httpVo != null && httpVo.getStatus() == 200) {
@@ -280,12 +285,12 @@ public class AlgorithmService implements IAlgorithmService {
             throw new BusinessException("参数列不能为空,请检查!");
         }
         algorithmBo.setColumnData(modelVo.getColumnData().split(","));
-        List<String> urls = new ArrayList<>();
+        List<Long> ossIds = new ArrayList<>();
         dataImportVos.forEach(el -> {
             algorithmBo.setOssId(el.getOssId());
             SysOssVo sysOssVo = getSysOssVo(algorithmBo);
             if (ObjectUtil.isNotNull(sysOssVo)) {
-                urls.add(sysOssVo.getUrl());
+                ossIds.add(sysOssVo.getOssId());
             }
         });
         // insert the degradation assessment record
@@ -301,7 +306,7 @@ public class AlgorithmService implements IAlgorithmService {
 
         // parameters
         Map<String, Object> map = StrUtil.isBlank(algorithmBo.getParam()) ? new HashMap<>() : new HashMap<>(JSONUtil.parseObj(algorithmBo.getParam()));
-        map.put("url", urls.getFirst());
+        map.put("url", url + ossIds.getFirst());
         // request
         ModelHttpVo httpVo = sendHttp(modelVo, map);
         if (httpVo != null && httpVo.getStatus() == 200) {
@@ -320,14 +325,11 @@ public class AlgorithmService implements IAlgorithmService {
         if (ObjectUtil.isNull(sysOss)) {
             throw new BusinessException("文件数据不存在!");
         }
-        String path = StringUtils.substringAfter(sysOss.getFileName(), Constants.RESOURCE_PREFIX);
         String tempPathCsv = EcoConfig.getTempPath() + "/" + CsvUtils.getName(FileUtils.getNameNotSuffix(sysOss.getOriginalName()));
-
-        JSONArray jsonArray = CsvUtils.getCsvDataByHeaders(path, Arrays.asList(algorithmBo.getColumnData()), null);
+        JSONArray jsonArray = CsvUtils.getCsvDataByHeaders(hadoopClient.readFileContent(sysOss.getUrl()), Arrays.asList(algorithmBo.getColumnData()), null);
         File file = CsvUtils.jsonToFileCsvByJsonArray(jsonArray, tempPathCsv);
-
         if (ObjectUtil.isNotNull(file)) {
-            sysOss = ossService.uploadHadoop(file);
+            sysOss = ossService.uploadHadoop(file, file.getName());
         }
         FileUtil.del(tempPathCsv);
         return sysOss;
@@ -359,17 +361,13 @@ public class AlgorithmService implements IAlgorithmService {
     @Override
     public List<String> getDataHeader(Long ossId) {
         SysOssVo ossVo = ossService.getById(ossId);
-        // database resource path
-        String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
-        return CsvUtils.getCsvHeaders(path);
+        return CsvUtils.getCsvHeaders(hadoopClient.readFileContent(ossVo.getUrl()));
     }
 
     @Override
     public String getDataByOssId(Long ossId) {
         SysOssVo ossVo = ossService.getById(ossId);
-        // database resource path
-        String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
-        return CsvUtils.fileCsvToJson(path).toString();
+        return CsvUtils.fileCsvToJson(hadoopClient.readFileContent(ossVo.getUrl())).toString();
     }
 
     @Override
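
The url parameter sent to the model services is now an HTTP download endpoint composed from the configurable oss.download.url prefix plus the OSS id, so the algorithm side fetches files over HTTP instead of receiving a raw storage URL. A minimal sketch of the composition (the class name is hypothetical; the property value comes from the yml additions below):

import java.util.HashMap;
import java.util.Map;
import org.eco.system.domain.vo.SysOssVo;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class ModelParamExample {

    // resolves to e.g. http://localhost:8080/als/resource/oss/hadoop/download/
    @Value("${oss.download.url}")
    private String url;

    public Map<String, Object> buildParams(SysOssVo sysOssVo) {
        Map<String, Object> map = new HashMap<>();
        // the model service downloads the input by OSS id: {oss.download.url}{ossId}
        map.put("url", url + sysOssVo.getOssId());
        return map;
    }
}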

+ 21 - 15
als-modules/agile-assurance/src/main/java/org/eco/als/service/impl/DataImportServiceImpl.java

@@ -13,12 +13,13 @@ import org.eco.als.mapper.DataImportMapper;
 import org.eco.als.service.IDataImportService;
 import org.eco.als.utils.CsvUtils;
 import org.eco.common.core.config.EcoConfig;
-import org.eco.common.core.constant.Constants;
 import org.eco.common.core.core.page.PageResult;
+import org.eco.common.core.exception.BusinessException;
 import org.eco.common.core.utils.MapstructUtils;
 import org.eco.common.core.utils.StringUtils;
 import org.eco.common.core.utils.file.FileUtils;
 import org.eco.common.excel.service.IExcelService;
+import org.eco.common.hadoop.hdfs.HadoopClient;
 import org.eco.common.orm.core.page.PageQuery;
 import org.eco.common.orm.core.service.impl.BaseServiceImpl;
 import org.eco.system.domain.vo.SysOssVo;
@@ -28,6 +29,8 @@ import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
 import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
 import java.util.Arrays;
 import java.util.List;
 
@@ -52,6 +55,8 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
     private IImportExportService importExportService;
     @Resource
     private ISysOssService ossService;
+    @Resource
+    private HadoopClient hadoopClient;
 
     @Override
     public QueryWrapper query() {
@@ -130,13 +135,7 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
      */
     @Override
     public boolean insert(DataImportBo dataImportBo) {
-        SysOssVo ossVo = ossService.getById(dataImportBo.getOssId());
-        String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
-        String pathCsv = EcoConfig.getTempPath() + "/" + CsvUtils.getName(FileUtils.getNameNotSuffix(ossVo.getOriginalName()));
-        File file = CsvUtils.excelToFileCsv(EcoConfig.getProfile() + path, pathCsv);
-        ossVo = ossService.uploadHadoop(file);
-        FileUtil.del(pathCsv);
-        dataImportBo.setOssId(ossVo.getOssId());
+        uploadHadoop(dataImportBo);
         DataImport dataImport = MapstructUtils.convert(dataImportBo, DataImport.class);
         return this.save(dataImport); // the ID is generated by the globally configured snowflake generator
     }
@@ -163,13 +162,7 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
     public boolean update(DataImportBo dataImportBo) {
         DataImportVo dataImportVo = this.selectById(dataImportBo.getId());
         if (!ObjectUtil.equals(dataImportVo.getOssId(), dataImportBo.getOssId())) {
-            SysOssVo ossVo = ossService.getById(dataImportBo.getOssId());
-            String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
-            String pathCsv = EcoConfig.getTempPath() + "/" + CsvUtils.getName(FileUtils.getNameNotSuffix(ossVo.getOriginalName()));
-            File file = CsvUtils.excelToFileCsv(EcoConfig.getProfile() + path, pathCsv);
-            ossVo = ossService.uploadHadoop(file);
-            FileUtil.del(pathCsv);
-            dataImportBo.setOssId(ossVo.getOssId());
+            uploadHadoop(dataImportBo);
         }
         DataImport dataImport = MapstructUtils.convert(dataImportBo, DataImport.class);
         if (ObjectUtil.isNotNull(dataImport) && ObjectUtil.isNotNull(dataImport.getId())) {
@@ -178,6 +171,19 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
         return false;
     }
 
+    /**
+     * Convert the imported Excel file to CSV via a Hadoop stream, upload the
+     * result, and re-point the import record at the new OSS id.
+     */
+    private void uploadHadoop(DataImportBo dataImportBo) {
+        SysOssVo ossVo = ossService.getById(dataImportBo.getOssId());
+        String pathCsv = EcoConfig.getTempPath() + "/" + CsvUtils.filename(FileUtils.getNameNotSuffix(ossVo.getOriginalName()));
+        try (InputStream inputStream = hadoopClient.readFileInputStream(ossVo.getUrl())) {
+            File file = CsvUtils.excelToFileCsv(inputStream, pathCsv);
+            ossVo = ossService.uploadHadoop(file, ossVo.getOriginalName());
+            FileUtil.del(pathCsv);
+            dataImportBo.setOssId(ossVo.getOssId());
+        } catch (IOException e) {
+            throw new BusinessException(e.getMessage());
+        }
+    }
+
     /**
      * Batch-delete data import records
      *
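
Both insert and update now funnel through the private uploadHadoop helper above: the original Excel is streamed out of HDFS, converted to CSV, re-uploaded, and the import record is re-pointed at the CSV's OSS id. A minimal caller-side sketch (class name, import paths, and the ossId are hypothetical, assumed from the module layout):

import jakarta.annotation.Resource;
import org.eco.als.domain.bo.DataImportBo;
import org.eco.als.service.IDataImportService;
import org.springframework.stereotype.Component;

@Component
public class DataImportExample {

    @Resource
    private IDataImportService dataImportService;

    public void importExcel(Long excelOssId) {
        DataImportBo bo = new DataImportBo();
        // OSS record of an Excel file already stored in HDFS
        bo.setOssId(excelOssId);
        dataImportService.insert(bo);
        // after insert(), bo.getOssId() references the converted CSV
    }
}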

+ 34 - 46
als-modules/agile-assurance/src/main/java/org/eco/als/utils/CsvUtils.java

@@ -8,14 +8,13 @@ import cn.hutool.core.text.csv.CsvReader;
 import cn.hutool.core.text.csv.CsvRow;
 import cn.hutool.core.text.csv.CsvUtil;
 import cn.hutool.core.util.CharsetUtil;
-import cn.hutool.core.util.StrUtil;
 import cn.hutool.json.JSONArray;
 import cn.hutool.json.JSONObject;
 import cn.hutool.json.JSONUtil;
 import cn.hutool.poi.excel.ExcelReader;
 import cn.hutool.poi.excel.ExcelUtil;
 import lombok.extern.slf4j.Slf4j;
-import org.eco.common.core.config.EcoConfig;
+import org.apache.commons.io.FilenameUtils;
 import org.eco.common.core.utils.DateUtils;
 import org.eco.common.core.utils.StringUtils;
 import org.eco.common.core.utils.uuid.Seq;
@@ -51,6 +50,11 @@ public class CsvUtils {
         return StringUtils.format("{}/{}.{}", DateUtils.datePath(), fileName, CSV_TYPE);
     }
 
+    /**
+     * Build a dated CSV file name with a unique sequence suffix
+     */
+    public static String filename(String fileName) {
+        return StringUtils.format("{}/{}_{}.{}", DateUtils.datePath(),
+            FilenameUtils.getBaseName(fileName), Seq.getId(Seq.uploadSeqType), CSV_TYPE);
+    }
+
     /**
      * Convert JSON to CSV data
      *
@@ -112,11 +116,10 @@ public class CsvUtils {
     /**
      * Convert CSV content to JSON
      *
-     * @param path path
+     * @param csvStr CSV content as a string
      */
-    public static JSONArray fileCsvToJson(String path) {
-        String csvFilePath = EcoConfig.getProfile() + path;
-        List<CsvRow> rows = getCsvRowList(csvFilePath);
+    public static JSONArray fileCsvToJson(String csvStr) {
+        List<CsvRow> rows = getCsvRowList(csvStr);
         // get the CSV header, i.e. the first row
         assert rows != null;
         List<String> headers = rows.get(0).getRawList();
@@ -133,44 +136,14 @@ public class CsvUtils {
         return jsonArray;
     }
 
-    /**
-     * Handle abnormal values
-     *
-     * @param path path
-     */
-    public static JSONArray abnormal(String path) {
-        JSONArray jsonArray = fileCsvToJson(path);
-        for (int i = 0; i < jsonArray.size(); i++) {
-            // get the JSONObject from the JSONArray
-            JSONObject jsonObject = jsonArray.getJSONObject(i);
-            // stores the keys to be removed
-            List<String> keysToRemove = new ArrayList<>();
-            // iterate over all keys of the JSONObject
-            for (String key : jsonObject.keySet()) {
-                if (StrUtil.contains(key, "_是否异常") && StrUtil.equals("1", jsonObject.get(key).toString())) {
-                    jsonObject.set(StrUtil.subBefore(key, "_是否异常", true), "");
-                }
-                if (StrUtil.contains(key, "_是否异常")) {
-                    // collect the key for removal
-                    keysToRemove.add(key);
-                }
-            }
-            // remove the collected keys
-            for (String keyToRemove : keysToRemove) {
-                jsonObject.remove(keyToRemove);
-            }
-        }
-        return jsonArray;
-    }
-
     /**
      * Get the parameter names (headers) from CSV content
      *
-     * @param csvFilePath CSV file path
+     * @param csvStr CSV content as a string
      * @return list of parameter names
      */
-    public static List<String> getCsvHeaders(String csvFilePath) {
-        List<CsvRow> rows = getCsvRowList(EcoConfig.getProfile() + csvFilePath);
+    public static List<String> getCsvHeaders(String csvStr) {
+        List<CsvRow> rows = getCsvRowList(csvStr);
         assert rows != null;
         return rows.getFirst().getRawList();
     }
@@ -178,12 +151,12 @@ public class CsvUtils {
     /**
      * Get parameter data from CSV content by header (parameter) names
      *
-     * @param csvFilePath CSV file path
+     * @param csvStr      CSV content as a string
      * @param headerNames parameter names
      * @return parameter data as a JSON array
      */
-    public static JSONArray getCsvDataByHeaders(String csvFilePath, List<String> headerNames, Integer step) {
-        List<CsvRow> rows = getCsvRowList(EcoConfig.getProfile() + csvFilePath);
+    public static JSONArray getCsvDataByHeaders(String csvStr, List<String> headerNames, Integer step) {
+        List<CsvRow> rows = getCsvRowList(csvStr);
         // get the CSV header, i.e. the first row
         List<String> headers = rows.get(0).getRawList();
         List<Integer> indexList = new ArrayList<>();
@@ -215,8 +188,8 @@ public class CsvUtils {
         return jsonArray;
     }
 
-    public static JSONObject getPlaybackByHeaders(String csvFilePath, List<String> headerNames, Integer step) {
-        List<CsvRow> rows = getCsvRowList(EcoConfig.getProfile() + csvFilePath);
+    public static JSONObject getPlaybackByHeaders(String csvStr, List<String> headerNames, Integer step) {
+        List<CsvRow> rows = getCsvRowList(csvStr);
         // get the CSV header, i.e. the first row
         List<String> headers = rows.get(0).getRawList();
         List<Integer> indexList = new ArrayList<>();
@@ -268,11 +241,11 @@ public class CsvUtils {
         return result;
     }
 
-    private static List<CsvRow> getCsvRowList(String csvFilePath) {
+    private static List<CsvRow> getCsvRowList(String csvStr) {
         try {
             CsvReader reader = CsvUtil.getReader();
             // read the CSV data from the string
-            CsvData data = reader.read(FileUtil.file(csvFilePath), CharsetUtil.CHARSET_UTF_8);
+            CsvData data = reader.readFromStr(csvStr);
             return data.getRows();
         } catch (Exception e) {
             log.error(e.getMessage());
@@ -295,6 +268,21 @@ public class CsvUtils {
         return jsonToFileCsv(json, csvFilePath);
     }
 
+    /**
+     * Convert an Excel input stream to a CSV file
+     *
+     * @param inputStream Excel data stream
+     * @param csvFilePath target CSV path
+     */
+    public static File excelToFileCsv(InputStream inputStream, String csvFilePath) {
+        // read the Excel stream and obtain an ExcelReader
+        ExcelReader reader = ExcelUtil.getReader(inputStream);
+        // read the Excel content into a List<Map> via the ExcelReader
+        List<Map<String, Object>> readAll = reader.readAll();
+        String json = JSONUtil.toJsonStr(readAll);
+        return jsonToFileCsv(json, csvFilePath);
+    }
+
     /**
      * Convert JSON to a CSV file
      *
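
With the path-based signatures replaced by string-based ones, the CSV helpers compose directly with HadoopClient.readFileContent. A minimal sketch, using hypothetical inline content in place of the HDFS read:

import cn.hutool.json.JSONArray;
import java.util.List;
import org.eco.als.utils.CsvUtils;

public class CsvUtilsExample {
    public static void main(String[] args) {
        // in the services this string comes from hadoopClient.readFileContent(ossVo.getUrl())
        String csvStr = "time,temp\n1,20.5\n2,21.0"; // hypothetical content
        List<String> headers = CsvUtils.getCsvHeaders(csvStr);   // [time, temp]
        JSONArray rows = CsvUtils.fileCsvToJson(csvStr);         // one JSON object per row
        JSONArray temps = CsvUtils.getCsvDataByHeaders(csvStr, List.of("temp"), null);
        System.out.println(headers + " " + rows + " " + temps);
    }
}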

+ 0 - 5
als-modules/system/src/main/java/org/eco/system/controller/system/SysOssController.java

@@ -3,14 +3,12 @@ package org.eco.system.controller.system;
 
 import cn.dev33.satoken.annotation.SaCheckPermission;
 import cn.hutool.core.util.ObjectUtil;
-import jakarta.annotation.Resource;
 import jakarta.servlet.http.HttpServletResponse;
 import jakarta.validation.constraints.NotEmpty;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.eco.common.core.core.domain.CommonResult;
 import org.eco.common.core.core.page.PageResult;
-import org.eco.common.hadoop.hdfs.HadoopClient;
 import org.eco.common.log.annotation.Log;
 import org.eco.common.log.enums.BusinessType;
 import org.eco.common.web.core.BaseController;
@@ -41,9 +39,6 @@ public class SysOssController extends BaseController {
 
     private final ISysOssService ossService;
 
-    @Resource
-    private HadoopClient hadoopClient;
-
     /**
      * Query the OSS object storage list
      */

+ 1 - 1
als-modules/system/src/main/java/org/eco/system/service/ISysOssService.java

@@ -33,7 +33,7 @@ public interface ISysOssService extends IBaseService<SysOss> {
 
     SysOssVo upload(File file);
 
-    SysOssVo uploadHadoop(File file);
+    SysOssVo uploadHadoop(File file, String fileName);
 
     void download(Long ossId, HttpServletResponse response) throws IOException;
 

+ 2 - 1
als-modules/system/src/main/java/org/eco/system/service/impl/CommonService.java

@@ -22,7 +22,7 @@ import java.util.Map;
 @Service
 public class CommonService implements ICommonService {
 
-    @Value("${server.port:8080}")
+    @Value("${server.port}")
     private Long port;
 
     @Override
@@ -48,6 +48,7 @@ public class CommonService implements ICommonService {
         }
     }
 
+
     public static String getIpAddress() {
         try {
             // get the IP from the network interface

+ 2 - 3
als-modules/system/src/main/java/org/eco/system/service/impl/SysOssServiceImpl.java

@@ -73,7 +73,6 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
     @Value("${hadoop.directoryPath}")
     private String directoryPath;
 
-
     @Override
     public QueryWrapper query() {
         return super.query().from(SYS_OSS);
@@ -295,7 +294,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
     }
 
     @Override
-    public SysOssVo uploadHadoop(File file) {
+    public SysOssVo uploadHadoop(File file, String fileName) {
         String originalFileName = file.getName();
         String suffix = StringUtils.substring(originalFileName, originalFileName.lastIndexOf("."), originalFileName.length());
         UploadResult uploadResult;
@@ -307,7 +306,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
             throw new BusinessException(e.getMessage());
         }
         // save the file information
-        return buildResultEntity(originalFileName, suffix, "hadoop", uploadResult);
+        return buildResultEntity(fileName, suffix, "hadoop", uploadResult);
     }
 
     private SysOssVo buildResultEntity(String originalFileName, String suffix, String configKey, UploadResult uploadResult) {

+ 3 - 0
als-start/src/main/resources/application-dev.yml

@@ -181,3 +181,6 @@ hadoop:
   directoryPath: /uploadPath/
   userName: root
   replication: 1
+oss:
+  download:
+    url: http://localhost:${server.port}/als/resource/oss/hadoop/download/

+ 3 - 0
als-start/src/main/resources/application-local.yml

@@ -177,3 +177,6 @@ hadoop:
   directoryPath: /uploadPath/
   userName: root
   replication: 1
+oss:
+  download:
+    url: http://localhost:${server.port}/als/resource/oss/hadoop/download/

+ 3 - 0
als-start/src/main/resources/application-prod.yml

@@ -173,3 +173,6 @@ easy-es:
   password: # ES password; remove this line if not set
 kgqa:
   ask-url: http://192.168.0.103:8000/kgqa/ask/
+oss:
+  download:
+    url: http://localhost:${server.port}/als/resource/oss/hadoop/download/