
1. Integrate Hadoop
2. Change the installationTime return value of the installation-info query by aircraft number

Gaokun Wang 6 months ago
parent
commit
a44b823e0d
23 files changed with 694 additions and 94 deletions
  1. + 7 - 0
      als-common/common-bom/pom.xml
  2. + 1 - 0
      als-common/common-core/src/main/java/org/eco/common/core/service/OssService.java
  3. + 54 - 0
      als-common/common-hadoop/pom.xml
  4. + 54 - 0
      als-common/common-hadoop/src/main/java/org/eco/common/hadoop/config/HadoopConfig.java
  5. + 23 - 0
      als-common/common-hadoop/src/main/java/org/eco/common/hadoop/config/properties/HadoopProperties.java
  6. + 311 - 0
      als-common/common-hadoop/src/main/java/org/eco/common/hadoop/hdfs/HadoopClient.java
  7. + 60 - 0
      als-common/common-hadoop/src/main/java/org/eco/common/hadoop/utils/FileUtil.java
  8. + 1 - 0
      als-common/common-hadoop/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
  9. + 1 - 1
      als-common/common-oss/src/main/java/org/eco/common/oss/factory/OssFactory.java
  10. + 1 - 0
      als-common/pom.xml
  11. + 1 - 8
      als-modules/agile-assurance/src/main/java/org/eco/als/controller/DataImportController.java
  12. + 1 - 0
      als-modules/agile-assurance/src/main/java/org/eco/als/domain/vo/AirInstallVo.java
  13. + 0 - 23
      als-modules/agile-assurance/src/main/java/org/eco/als/service/IDataImportService.java
  14. + 2 - 2
      als-modules/agile-assurance/src/main/java/org/eco/als/service/impl/AlgorithmService.java
  15. + 2 - 32
      als-modules/agile-assurance/src/main/java/org/eco/als/service/impl/DataImportServiceImpl.java
  16. + 4 - 0
      als-modules/system/pom.xml
  17. + 36 - 12
      als-modules/system/src/main/java/org/eco/system/controller/system/SysOssController.java
  18. + 10 - 3
      als-modules/system/src/main/java/org/eco/system/service/ISysOssService.java
  19. + 86 - 13
      als-modules/system/src/main/java/org/eco/system/service/impl/SysOssServiceImpl.java
  20. + 8 - 0
      als-start/src/main/resources/application-dev.yml
  21. + 8 - 0
      als-start/src/main/resources/application-local.yml
  22. + 23 - 0
      pom.xml
  23. BIN
      接口.xlsx

+ 7 - 0
als-common/common-bom/pom.xml

@@ -40,6 +40,13 @@
                 <version>${revision}</version>
             </dependency>
 
+            <!-- hadoop module -->
+            <dependency>
+                <groupId>org.eco</groupId>
+                <artifactId>common-hadoop</artifactId>
+                <version>${revision}</version>
+            </dependency>
+
             <!-- scheduled task module -->
             <dependency>
                 <groupId>org.eco</groupId>

+ 1 - 0
als-common/common-core/src/main/java/org/eco/common/core/service/OssService.java

@@ -18,5 +18,6 @@ public interface OssService {
     String selectUrlByIds(String ossIds);
 
     UploadRes upload(byte[] file, String name);
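+
+    /**
+     * Upload raw bytes to HDFS-backed storage; the HDFS counterpart of upload(byte[], String)
+     */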
+    UploadRes uploadHadoop(byte[] file, String name);
 
 }

+ 54 - 0
als-common/common-hadoop/pom.xml

@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.eco</groupId>
+        <artifactId>als-common</artifactId>
+        <version>${revision}</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>common-hadoop</artifactId>
+
+    <description>
+        common-hadoop file management
+    </description>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.eco</groupId>
+            <artifactId>common-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-reload4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.servlet</groupId>
+                    <artifactId>servlet-api</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.eco</groupId>
+            <artifactId>common-json</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.eco</groupId>
+            <artifactId>common-redis</artifactId>
+        </dependency>
+
+    </dependencies>
+
+</project>

+ 54 - 0
als-common/common-hadoop/src/main/java/org/eco/common/hadoop/config/HadoopConfig.java

@@ -0,0 +1,54 @@
+package org.eco.common.hadoop.config;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.eco.common.hadoop.config.properties.HadoopProperties;
+import org.eco.common.hadoop.hdfs.HadoopClient;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.AutoConfiguration;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+
+import java.net.URI;
+
+/**
+ * @Description: HadoopConfig
+ * @Author: GaoKun Wang
+ * @Date: 2024/9/19
+ */
+@AutoConfiguration
+@EnableConfigurationProperties(HadoopProperties.class)
+@ConditionalOnProperty(value = "hadoop.enabled", havingValue = "true")
+@Slf4j
+public class HadoopConfig {
+
+    @Autowired
+    private HadoopProperties properties;
+
+    @Bean
+    public FileSystem fileSystem() {
+        System.setProperty("hadoop.home.dir", "D:\\tools\\hadoop\\hadoop-3.3.0"); // winutils location for local Windows development; environment-specific
+//        System.setProperty("HADOOP_USER_NAME", "root");
+        Configuration configuration = new Configuration();
+        configuration.set("dfs.replication", properties.getReplication());
+        configuration.set("fs.defaultFS", properties.getNameNode());
+        configuration.set("mapred.job.tracker", properties.getNameNode());
+        FileSystem fs = null;
+        try {
+            URI uri = new URI(properties.getDirectoryPath().trim());
+            fs = FileSystem.get(uri, configuration, properties.getUserName());
+        } catch (Exception e) {
+            log.error("Failed to initialize the HDFS FileSystem", e);
+        }
+        // may be null when initialization fails; HadoopClient creation would then fail at startup
+        return fs;
+    }
+
+    @Bean
+    @ConditionalOnBean(FileSystem.class)
+    public HadoopClient hadoopClient(FileSystem fs, HadoopProperties hadoopProperties) {
+        return new HadoopClient(fs, hadoopProperties);
+    }
+}

+ 23 - 0
als-common/common-hadoop/src/main/java/org/eco/common/hadoop/config/properties/HadoopProperties.java

@@ -0,0 +1,23 @@
+package org.eco.common.hadoop.config.properties;
+
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/**
+ * @Description: HadoopProperties
+ * @Author: GaoKun Wang
+ * @Date: 2024/9/19
+ */
+@Data
+@ConfigurationProperties(prefix = "hadoop")
+public class HadoopProperties {
+    private String nameNode;
+    private String directoryPath;
+    private String userName;
+    private String replication = "1";
+    private Boolean enabled;
+
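+    /**
+     * Base HDFS path: nameNode (used as fs.defaultFS) joined with the working directory
+     */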
+    public String getPath() {
+        return this.nameNode + this.directoryPath;
+    }
+}

+ 311 - 0
als-common/common-hadoop/src/main/java/org/eco/common/hadoop/hdfs/HadoopClient.java

@@ -0,0 +1,311 @@
+package org.eco.common.hadoop.hdfs;
+
+import jakarta.annotation.PostConstruct;
+import lombok.AllArgsConstructor;
+import lombok.Cleanup;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.io.IOUtils;
+import org.eco.common.hadoop.config.properties.HadoopProperties;
+import org.springframework.stereotype.Component;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Description: HadoopClient
+ * @Author: GaoKun Wang
+ * @Date: 2024/9/19
+ */
+@Slf4j
+@AllArgsConstructor
+@Component
+public class HadoopClient {
+    private FileSystem fileSystem;
+    private HadoopProperties hadoopProperties;
+
+
+    /**
+     * Create the working directory at startup; it is only created if it does not already exist
+     */
+    @PostConstruct
+    public void init() {
+        mkdir(hadoopProperties.getDirectoryPath(), true);
+    }
+
+    /**
+     * Concatenate the base path with a folder name
+     *
+     * @param folder folder path name
+     * @return {@link String}
+     */
+    private String spliceFolderPath(String folder) {
+        if (StringUtils.isNotEmpty(folder)) {
+            return hadoopProperties.getPath() + folder;
+        } else {
+            return hadoopProperties.getPath();
+        }
+    }
+
+    /**
+     * Create a directory
+     *
+     * @param folderPath folder path
+     * @param create     whether to create the directory when it does not exist
+     * @return {@code boolean}
+     */
+    public boolean mkdir(String folderPath, boolean create) {
+        log.info("【开始创建目录】 文件夹路径名称: {}", folderPath);
+        boolean flag = false;
+        if (StringUtils.isEmpty(folderPath)) {
+            throw new IllegalArgumentException("folder不能为空");
+        }
+        try {
+            Path path = new Path(folderPath);
+            if (create) {
+                if (!fileSystem.exists(path)) {
+                    fileSystem.mkdirs(path);
+                }
+            }
+            if (fileSystem.getFileStatus(path).isDirectory()) {
+                flag = true;
+            }
+        } catch (Exception e) {
+            log.error("【创建目录失败】", e);
+        }
+        return flag;
+    }
+
+    /**
+     * Upload a file to HDFS
+     *
+     * @param delSrc    whether to delete the source file; true deletes it (default false)
+     * @param overwrite whether to overwrite an existing target
+     * @param srcFile   source file, the local path to upload
+     * @param destPath  target path on the file system
+     */
+    public void copyFileToHDFS(boolean delSrc, boolean overwrite, String srcFile, String destPath) {
+        log.info("【文件上传】 开始上传, 上传文件路径: {}", destPath);
+        Path srcPath = new Path(srcFile);
+        // 目标路径
+        Path dstPath = new Path(destPath);
+        try {
+            // 文件上传
+            fileSystem.copyFromLocalFile(delSrc, overwrite, srcPath, dstPath);
+        } catch (IOException e) {
+            log.error("【文件上传失败】", e);
+        }
+    }
+
+
+    /**
+     * Delete a file or directory
+     *
+     * @param path     directory path
+     * @param fileName file name
+     */
+    public void rmdir(String path, String fileName) {
+        log.info("【删除文件】 开始删除, 删除文件目录的路径: {}, 文件目录: {}", path, fileName);
+        try {
+            // 返回FileSystem对象
+            if (StringUtils.isNotBlank(fileName)) {
+                path = path + "/" + fileName;
+            }
+            // 删除文件或者文件目录  delete(Path f) 此方法已经弃用
+            fileSystem.delete(new Path(path), true);
+        } catch (IllegalArgumentException | IOException e) {
+            log.error("【删除文件失败】", e);
+        }
+    }
+
+    /**
+     * Download a file into an output stream
+     *
+     * @param path         path
+     * @param fileName     file name
+     * @param outputStream output stream
+     * @throws IOException on stream errors
+     */
+    public void download(String path, String fileName, OutputStream outputStream) throws IOException {
+        log.info("【下载文件】 开始下载, 下载文件名称: {}", fileName);
+        @Cleanup InputStream is = fileSystem.open(new Path(path + fileName));
+        IOUtils.copyBytes(is, outputStream, is.available(), true);
+    }
+
+    /**
+     * Download a file to the local file system
+     *
+     * @param path         HDFS file path
+     * @param downloadPath local download path
+     */
+    public void downloadFileFromLocal(String path, String downloadPath) {
+        log.info("【下载文件到本地】 开始下载, 文件路径: {}, 本地下载路径: {}", path, downloadPath);
+        // 上传路径
+        Path clientPath = new Path(path);
+        // 目标路径
+        Path serverPath = new Path(downloadPath);
+        try {
+            // 调用文件系统的文件复制方法,第一个参数是否删除原文件true为删除,默认为false
+            fileSystem.copyToLocalFile(false, clientPath, serverPath);
+        } catch (IOException e) {
+            log.error("downloadFileFromLocal 异常:{}", e.getMessage());
+        }
+    }
+
+    /**
+     * Get directory status information
+     *
+     * @param path directory path
+     * @return {@link List}
+     */
+    public List<Map<String, Object>> getPathInfo(String path) {
+        log.info("【获取目录信息】 开始获取, 目录路径: {}", path);
+        FileStatus[] statusList;
+        List<Map<String, Object>> list = new ArrayList<>();
+        try {
+            statusList = fileSystem.listStatus(new Path(path));
+            if (null != statusList) {
+                for (FileStatus fileStatus : statusList) {
+                    Map<String, Object> map = new HashMap<>();
+                    map.put("filePath", fileStatus.getPath());
+                    map.put("fileStatus", fileStatus.toString());
+                    list.add(map);
+                }
+            }
+        } catch (IOException e) {
+            log.error("getPathInfo 异常:{}", e.getMessage());
+        }
+        return list;
+    }
+
+    /**
+     * List the files under a directory
+     *
+     * @param path directory path
+     * @return {@link List}
+     */
+    public List<Map<String, String>> getFileList(String path) {
+        log.info("【获取目录下文件列表】 开始获取, 目录路径: {}", path);
+        List<Map<String, String>> list = new ArrayList<>();
+        try {
+            // 递归找到所有文件
+            RemoteIterator<LocatedFileStatus> filesList = fileSystem.listFiles(new Path(path), true);
+            while (filesList.hasNext()) {
+                LocatedFileStatus next = filesList.next();
+                String fileName = next.getPath().getName();
+                Path filePath = next.getPath();
+                Map<String, String> map = new HashMap<>();
+                map.put("fileName", fileName);
+                map.put("filePath", filePath.toString());
+                list.add(map);
+            }
+        } catch (IOException e) {
+            log.error("getFileList 异常:{}", e.getMessage());
+        }
+        return list;
+    }
+
+    /**
+     * Read the contents of a file
+     *
+     * @param filePath file path
+     */
+    public String readFile(String filePath) {
+        log.info("【读取文件内容】 开始读取, 文件路径: {}", filePath);
+        Path newPath = new Path(filePath);
+        InputStream in = null;
+        BufferedReader reader = null;
+        StringBuilder buffer = new StringBuilder();
+        try {
+            in = fileSystem.open(newPath);
+            String line; // holds each line as it is read
+            // set the character encoding explicitly to avoid garbled Chinese text
+            reader = new BufferedReader(new InputStreamReader(in, "GBK"));
+            // read the first line
+            line = reader.readLine();
+            // a null line means the end of the file was reached
+            while (line != null) {
+                // append the line to the buffer
+                buffer.append(line);
+                // append a newline
+                buffer.append("\n");
+                // read the next line
+                line = reader.readLine();
+            }
+        } catch (IOException e) {
+            log.error("readFile 异常:{}", e.getMessage());
+        } finally {
+            try {
+                if (reader != null) {
+                    reader.close();
+                }
+            } catch (IOException e) {
+                log.error("异常:{}", e.getMessage());
+            }
+            IOUtils.closeStream(in);
+        }
+        return buffer.toString();
+    }
+
+    /**
+     * Rename a file or folder
+     *
+     * @param oldName old file or folder name
+     * @param newName new file or folder name
+     * @return whether the rename succeeded: true on success, false on failure
+     */
+    public boolean renameFile(String oldName, String newName) {
+        log.info("【文件或文件夹重命名】 开始重命名, 旧文件或旧文件夹名称: {}, 新文件或新文件夹名称: {} ", oldName, newName);
+        boolean isOk = false;
+        Path oldPath = new Path(oldName);
+        Path newPath = new Path(newName);
+        try {
+            // perform the rename
+            isOk = fileSystem.rename(oldPath, newPath);
+        } catch (IOException e) {
+            log.error("renameFile 异常:{}", e.getMessage());
+        }
+        return isOk;
+    }
+
+    /**
+     * Copy a file within HDFS
+     *
+     * @param sourcePath source path
+     * @param targetPath target path
+     */
+    public void copyFile(String sourcePath, String targetPath) {
+        log.info("【复制文件】 开始复制, 复制路径: {}, 目标路径: {}", sourcePath, targetPath);
+        // 原始文件路径
+        Path oldPath = new Path(sourcePath);
+        // 目标路径
+        Path newPath = new Path(targetPath);
+        FSDataInputStream inputStream;
+        FSDataOutputStream outputStream;
+        try {
+            inputStream = fileSystem.open(oldPath);
+            outputStream = fileSystem.create(newPath);
+            IOUtils.copyBytes(inputStream, outputStream, 1024 * 1024 * 64, false);
+            IOUtils.closeStreams(inputStream, outputStream);
+        } catch (IOException e) {
+            log.error("copyFile 异常:{}", e.getMessage());
+        }
+    }
+}
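
A minimal usage sketch of the new client (the service class and target path below are illustrative, not part of this commit). With hadoop.enabled=true, HadoopConfig auto-configures the FileSystem and HadoopClient beans, so a consumer only needs to inject the client:

    import jakarta.annotation.Resource;
    import org.eco.common.hadoop.hdfs.HadoopClient;
    import org.springframework.stereotype.Service;

    @Service
    public class ReportArchiveService { // hypothetical consumer
        @Resource
        private HadoopClient hadoopClient;

        public void archive(String localCsvPath) {
            // keep the local source (delSrc=false) and overwrite any existing target (overwrite=true)
            hadoopClient.copyFileToHDFS(false, true, localCsvPath, "/uploadPath/report.csv");
        }
    }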

+ 60 - 0
als-common/common-hadoop/src/main/java/org/eco/common/hadoop/utils/FileUtil.java

@@ -0,0 +1,60 @@
+package org.eco.common.hadoop.utils;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.hadoop.io.IOUtils;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.Objects;
+
+/**
+ * @Description: FileUtil
+ * @Author: GaoKun Wang
+ * @Date: 2024/9/19
+ */
+@Slf4j
+public class FileUtil {
+    /**
+     * Write an input stream to a file
+     *
+     * @param in   input stream
+     * @param file target file
+     */
+    public static void inputStreamToFile(InputStream in, File file) {
+        OutputStream os = null;
+        try {
+            os = new FileOutputStream(file);
+            int bytesRead;
+            byte[] buffer = new byte[8192];
+            while ((bytesRead = in.read(buffer, 0, 8192)) != -1) {
+                os.write(buffer, 0, bytesRead);
+            }
+        } catch (Exception e) {
+            log.error(e.getMessage(), e);
+        } finally {
+            // close both streams even when the copy fails
+            IOUtils.closeStreams(os, in);
+        }
+    }
+
+    /**
+     * Convert a MultipartFile to a File
+     *
+     * @param file the uploaded file
+     * @return {@link File}
+     */
+    public static File multipartFileToFile(MultipartFile file) {
+        File f = null;
+        try {
+            if (file != null && file.getSize() > 0) {
+                InputStream in = file.getInputStream();
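+                // the copy is written to the JVM working directory under the original file name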
+                f = new File(Objects.requireNonNull(file.getOriginalFilename()));
+                inputStreamToFile(in, f);
+            }
+            return f;
+        } catch (Exception e) {
+            log.error(e.getMessage(), e);
+            return f;
+        }
+    }
+}

+ 1 - 0
als-common/common-hadoop/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports

@@ -0,0 +1 @@
+org.eco.common.hadoop.config.HadoopConfig

+ 1 - 1
als-common/common-oss/src/main/java/org/eco/common/oss/factory/OssFactory.java

@@ -59,7 +59,7 @@ public class OssFactory {
             try {
                 client = CLIENT_CACHE.get(key);
                 if (client == null || !client.checkPropertiesSame(properties)) {
-                    if(StrUtil.equals(configKey, "local")) {
+                    if(StrUtil.equals(configKey, "hadoop")) {
                         CLIENT_CACHE.put(key, new OssClient(configKey, properties,null,null,null));
                     } else {
                         CLIENT_CACHE.put(key, new OssClient(configKey, properties));

+ 1 - 0
als-common/pom.xml

@@ -15,6 +15,7 @@
         <module>common-core</module>
         <module>common-encrypt</module>
         <module>common-excel</module>
+        <module>common-hadoop</module>
         <module>common-job</module>
         <module>common-json</module>
         <module>common-log</module>

+ 1 - 8
als-modules/agile-assurance/src/main/java/org/eco/als/controller/DataImportController.java

@@ -13,14 +13,7 @@ import org.eco.common.log.enums.BusinessType;
 import org.eco.common.web.annotation.RepeatSubmit;
 import org.eco.common.web.core.BaseController;
 import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.DeleteMapping;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.PutMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.*;
 
 /**
  * Data import information Controller

+ 1 - 0
als-modules/agile-assurance/src/main/java/org/eco/als/domain/vo/AirInstallVo.java

@@ -97,6 +97,7 @@ public class AirInstallVo extends BaseEntity implements Serializable {
      */
     @ExcelProperty(value = "装机时间")
     private String installTime;
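+
+    /**
+     * Installation time as returned by the installation-info query by aircraft number
+     */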
+    private String installationTime;
 
     /**
      * 软件版本

+ 0 - 23
als-modules/agile-assurance/src/main/java/org/eco/als/service/IDataImportService.java

@@ -3,11 +3,8 @@ package org.eco.als.service;
 import org.eco.als.domain.DataImport;
 import org.eco.als.domain.bo.DataImportBo;
 import org.eco.als.domain.vo.DataImportVo;
-import org.eco.common.core.core.domain.model.LoginUser;
 import org.eco.common.core.core.page.PageResult;
 import org.eco.common.orm.core.service.IBaseService;
-import org.springframework.scheduling.annotation.Async;
-import org.springframework.web.multipart.MultipartFile;
 
 import java.util.List;
 
@@ -82,24 +79,4 @@ public interface IDataImportService extends IBaseService<DataImport> {
      */
     boolean deleteByIds(Long[] ids);
 
-    /**
-     * Asynchronous import
-     *
-     * @param file          the file to import
-     * @param updateSupport whether to overwrite existing data
-     * @param user          the logged-in user context
-     */
-    @Async
-    void asyncImportData(MultipartFile file, boolean updateSupport, LoginUser user);
-
-    /**
-     * asyncExport asynchronous export
-     *
-     * @param listVo    the data list
-     * @param sheetName the sheet (file) name
-     * @param user      the user context
-     */
-    @Async
-    void asyncExport(List<DataImportVo> listVo, String sheetName, LoginUser user);
-
 }

+ 2 - 2
als-modules/agile-assurance/src/main/java/org/eco/als/service/impl/AlgorithmService.java

@@ -127,7 +127,7 @@ public class AlgorithmService implements IAlgorithmService {
         ModelHttpVo httpVo = sendHttp(modelVo, map);
         if (httpVo != null && httpVo.getStatus() == 200) {
             File resultFile = CsvUtils.jsonToFileCsv(httpVo.getData(), CsvUtils.extractFilename("model_result"));
-            SysOssVo sysOssVo2 = ossService.upload(resultFile);
+            SysOssVo sysOssVo2 = ossService.uploadHadoop(resultFile);
             processingBo.setOssId(sysOssVo2.getOssId());
             processingBo.setStatus("1");
             processingBo.setVersion(preProcessing.getVersion());
@@ -327,7 +327,7 @@ public class AlgorithmService implements IAlgorithmService {
         File file = CsvUtils.jsonToFileCsvByJsonArray(jsonArray, tempPathCsv);
 
         if (ObjectUtil.isNotNull(file)) {
-            sysOss = ossService.upload(file);
+            sysOss = ossService.uploadHadoop(file);
         }
         FileUtil.del(tempPathCsv);
         return sysOss;

+ 2 - 32
als-modules/agile-assurance/src/main/java/org/eco/als/service/impl/DataImportServiceImpl.java

@@ -8,20 +8,16 @@ import jakarta.annotation.Resource;
 import lombok.extern.slf4j.Slf4j;
 import org.eco.als.domain.DataImport;
 import org.eco.als.domain.bo.DataImportBo;
-import org.eco.als.domain.vo.DataImportImportVo;
 import org.eco.als.domain.vo.DataImportVo;
-import org.eco.als.listener.DataImportImportListener;
 import org.eco.als.mapper.DataImportMapper;
 import org.eco.als.service.IDataImportService;
 import org.eco.als.utils.CsvUtils;
 import org.eco.common.core.config.EcoConfig;
 import org.eco.common.core.constant.Constants;
-import org.eco.common.core.core.domain.model.LoginUser;
 import org.eco.common.core.core.page.PageResult;
 import org.eco.common.core.utils.MapstructUtils;
 import org.eco.common.core.utils.StringUtils;
 import org.eco.common.core.utils.file.FileUtils;
-import org.eco.common.excel.entity.ExcelResultRes;
 import org.eco.common.excel.service.IExcelService;
 import org.eco.common.orm.core.page.PageQuery;
 import org.eco.common.orm.core.service.impl.BaseServiceImpl;
@@ -30,10 +26,8 @@ import org.eco.system.service.IImportExportService;
 import org.eco.system.service.ISysOssService;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
-import org.springframework.web.multipart.MultipartFile;
 
 import java.io.File;
-import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 
@@ -140,7 +134,7 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
         String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
         String pathCsv = EcoConfig.getTempPath() + "/" + CsvUtils.getName(FileUtils.getNameNotSuffix(ossVo.getOriginalName()));
         File file = CsvUtils.excelToFileCsv(EcoConfig.getProfile() + path, pathCsv);
-        ossVo = ossService.upload(file);
+        ossVo = ossService.uploadHadoop(file);
         FileUtil.del(pathCsv);
         dataImportBo.setOssId(ossVo.getOssId());
         DataImport dataImport = MapstructUtils.convert(dataImportBo, DataImport.class);
@@ -173,7 +167,7 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
             String path = StringUtils.substringAfter(ossVo.getFileName(), Constants.RESOURCE_PREFIX);
             String pathCsv = EcoConfig.getTempPath() + "/" + CsvUtils.getName(FileUtils.getNameNotSuffix(ossVo.getOriginalName()));
             File file = CsvUtils.excelToFileCsv(EcoConfig.getProfile() + path, pathCsv);
-            ossVo = ossService.upload(file);
+            ossVo = ossService.uploadHadoop(file);
             FileUtil.del(pathCsv);
             dataImportBo.setOssId(ossVo.getOssId());
         }
@@ -184,30 +178,6 @@ public class DataImportServiceImpl extends BaseServiceImpl<DataImportMapper, Dat
         return false;
     }
 
-    @Override
-    public void asyncImportData(MultipartFile file, boolean updateSupport, LoginUser loginUser) {
-        ExcelResultRes result;
-        try {
-            String name = file.getOriginalFilename();
-            result = excelService.importExcel(file.getInputStream(), name, DataImportImportVo.class, new DataImportImportListener(updateSupport, loginUser));
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        boolean flag = importExportService.saveInfo(result, loginUser, "0");
-        if (flag) {
-            log.info("异步导入日志写入成功");
-        }
-    }
-
-    @Override
-    public void asyncExport(List<DataImportVo> listVo, String sheetName, LoginUser loginUser) {
-        ExcelResultRes result = excelService.exportExcel(listVo, sheetName, DataImportVo.class);
-        boolean flag = importExportService.saveInfo(result, loginUser, "1");
-        if (flag) {
-            log.info("异步导出日志写入成功");
-        }
-    }
-
     /**
      * Batch delete data import information
      *

+ 4 - 0
als-modules/system/pom.xml

@@ -100,6 +100,10 @@
             <artifactId>spring-boot-starter-test</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.eco</groupId>
+            <artifactId>common-hadoop</artifactId>
+        </dependency>
 
     </dependencies>
 

+ 36 - 12
als-modules/system/src/main/java/org/eco/system/controller/system/SysOssController.java

@@ -3,27 +3,24 @@ package org.eco.system.controller.system;
 
 import cn.dev33.satoken.annotation.SaCheckPermission;
 import cn.hutool.core.util.ObjectUtil;
-import org.eco.system.domain.bo.SysOssBo;
-import org.eco.system.domain.vo.SysOssVo;
+import jakarta.annotation.Resource;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.validation.constraints.NotEmpty;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 import org.eco.common.core.core.domain.CommonResult;
 import org.eco.common.core.core.page.PageResult;
+import org.eco.common.hadoop.hdfs.HadoopClient;
 import org.eco.common.log.annotation.Log;
 import org.eco.common.log.enums.BusinessType;
 import org.eco.common.web.core.BaseController;
+import org.eco.system.domain.bo.SysOssBo;
 import org.eco.system.domain.vo.SysOssUploadVo;
+import org.eco.system.domain.vo.SysOssVo;
 import org.eco.system.service.ISysOssService;
-import jakarta.servlet.http.HttpServletResponse;
-import jakarta.validation.constraints.NotEmpty;
-import lombok.RequiredArgsConstructor;
 import org.springframework.http.MediaType;
 import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.DeleteMapping;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.bind.annotation.*;
 import org.springframework.web.multipart.MultipartFile;
 
 import java.io.IOException;
@@ -35,6 +32,7 @@ import java.util.List;
  *
  * @author wgk
  */
+@Slf4j
 @Validated
 @RequiredArgsConstructor
 @RestController
@@ -43,6 +41,9 @@ public class SysOssController extends BaseController {
 
     private final ISysOssService ossService;
 
+    @Resource
+    private HadoopClient hadoopClient;
+
     /**
      * Query the OSS object storage list
      */
@@ -85,6 +86,29 @@ public class SysOssController extends BaseController {
         return CommonResult.success(uploadVo);
     }
 
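+    /**
+     * Upload an OSS object to HDFS
+     */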
+    @PostMapping(value = "/hadoop/upload", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
+    public CommonResult<SysOssUploadVo> uploadHadoop(@RequestPart("file") MultipartFile file) {
+        if (ObjectUtil.isNull(file)) {
+            return CommonResult.fail("上传文件不能为空");
+        }
+        SysOssVo oss = ossService.uploadHadoop(file);
+        SysOssUploadVo uploadVo = new SysOssUploadVo();
+        uploadVo.setUrl(oss.getUrl());
+        uploadVo.setFileName(oss.getOriginalName());
+        uploadVo.setOssId(oss.getOssId().toString());
+        return CommonResult.success(uploadVo);
+    }
+
+    /**
+     * Download an OSS object from HDFS
+     *
+     * @param ossId OSS object ID
+     */
+    @GetMapping("/hadoop/download/{ossId}")
+    public void downloadHadoop(@PathVariable Long ossId, HttpServletResponse response) throws IOException {
+        ossService.downloadHadoop(ossId, response);
+    }
+
     /**
      * Download an OSS object
      *

+ 10 - 3
als-modules/system/src/main/java/org/eco/system/service/ISysOssService.java

@@ -1,11 +1,11 @@
 package org.eco.system.service;
 
+import jakarta.servlet.http.HttpServletResponse;
+import org.eco.common.core.core.page.PageResult;
+import org.eco.common.orm.core.service.IBaseService;
 import org.eco.system.domain.SysOss;
 import org.eco.system.domain.bo.SysOssBo;
 import org.eco.system.domain.vo.SysOssVo;
-import org.eco.common.core.core.page.PageResult;
-import org.eco.common.orm.core.service.IBaseService;
-import jakarta.servlet.http.HttpServletResponse;
 import org.springframework.web.multipart.MultipartFile;
 
 import java.io.File;
@@ -29,12 +29,19 @@ public interface ISysOssService extends IBaseService<SysOss> {
 
     SysOssVo upload(MultipartFile file);
 
+    SysOssVo uploadHadoop(MultipartFile file);
+
     SysOssVo upload(File file);
 
+    SysOssVo uploadHadoop(File file);
+
     void download(Long ossId, HttpServletResponse response) throws IOException;
 
+    void downloadHadoop(Long ossId, HttpServletResponse response) throws IOException;
+
     InputStream getFileStream(Long ossId);
 
     Boolean deleteWithValidByIds(Collection<Long> ids, Boolean isValid);
+
     SysOssVo saveResultEntity(String originalFileName, String suffix, String url, String fileName);
 }

+ 86 - 13
als-modules/system/src/main/java/org/eco/system/service/impl/SysOssServiceImpl.java

@@ -12,7 +12,6 @@ import jakarta.servlet.http.HttpServletResponse;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.eco.common.core.config.EcoConfig;
-import org.eco.common.core.constant.CacheNames;
 import org.eco.common.core.constant.Constants;
 import org.eco.common.core.core.domain.UploadRes;
 import org.eco.common.core.core.page.PageResult;
@@ -23,6 +22,7 @@ import org.eco.common.core.utils.SpringUtils;
 import org.eco.common.core.utils.StreamUtils;
 import org.eco.common.core.utils.StringUtils;
 import org.eco.common.core.utils.file.FileUtils;
+import org.eco.common.hadoop.hdfs.HadoopClient;
 import org.eco.common.orm.core.page.PageQuery;
 import org.eco.common.orm.core.service.impl.BaseServiceImpl;
 import org.eco.common.oss.core.OssClient;
@@ -32,11 +32,10 @@ import org.eco.common.oss.factory.OssFactory;
 import org.eco.system.domain.SysOss;
 import org.eco.system.domain.bo.SysOssBo;
 import org.eco.system.domain.vo.SysOssVo;
-import org.eco.system.mapper.SysDeptMapper;
 import org.eco.system.mapper.SysOssMapper;
 import org.eco.system.service.ICommonService;
 import org.eco.system.service.ISysOssService;
-import org.springframework.cache.annotation.Cacheable;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.http.MediaType;
 import org.springframework.stereotype.Service;
 import org.springframework.web.multipart.MultipartFile;
@@ -49,6 +48,7 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 
+import static org.eco.common.core.utils.file.FileUploadUtils.extractFilename;
 import static org.eco.common.core.utils.file.FileUtils.percentEncode;
 import static org.eco.system.domain.table.SysOssTableDef.SYS_OSS;
 
@@ -67,6 +67,11 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
 
     @Resource
     private ICommonService commonService;
+    @Resource
+    private HadoopClient hadoopClient;
+
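+    // HDFS upload directory, bound from hadoop.directoryPath in application-*.yml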
+    @Value("${hadoop.directoryPath}")
+    private String directoryPath;
 
 
     @Override
@@ -136,7 +141,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
 
     @Override
     public void download(Long ossId, HttpServletResponse response) throws IOException {
-        SysOssVo sysOss = SpringUtils.getAopProxy(this).getById(ossId);
+        SysOssVo sysOss = this.getById(ossId);
         if (ObjectUtil.isNull(sysOss)) {
             throw new BusinessException("文件数据不存在!");
         }
@@ -152,6 +157,21 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
         }
     }
 
+    @Override
+    public void downloadHadoop(Long ossId, HttpServletResponse response) throws IOException {
+        SysOssVo sysOss = this.getById(ossId);
+        if (ObjectUtil.isNull(sysOss)) {
+            throw new BusinessException("文件数据不存在!");
+        }
+        FileUtils.setAttachmentResponseHeader(response, sysOss.getOriginalName());
+        response.setContentType(MediaType.APPLICATION_OCTET_STREAM_VALUE + "; charset=UTF-8");
+        try {
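+            // sysOss.getUrl() already holds the full HDFS path, so the fileName argument is left empty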
+            hadoopClient.download(sysOss.getUrl(), "", response.getOutputStream());
+        } catch (Exception e) {
+            throw new BusinessException(e.getMessage());
+        }
+    }
+
     @Override
     public InputStream getFileStream(Long ossId) {
         SysOssVo sysOss = SpringUtils.getAopProxy(this).getById(ossId);
@@ -175,7 +195,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
         OssClient storage = OssFactory.instance();
         UploadResult uploadResult;
         try {
-            if (StrUtil.equals(storage.getConfigKey(), "local")) {
+            if (StrUtil.equals(storage.getConfigKey(), "hadoop")) {
                 Map<String, String> map = commonService.uploadFile(file);
                 uploadResult = UploadResult.builder().url(map.get("url")).filename(map.get("fileName")).build();
             } else {
@@ -188,6 +208,25 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
         return buildResultEntity(originalFilename, suffix, storage.getConfigKey(), uploadResult);
     }
 
+    @Override
+    public SysOssVo uploadHadoop(MultipartFile file) {
+        String originalFilename = file.getOriginalFilename();
+        assert originalFilename != null;
+        String suffix = StringUtils.substring(originalFilename, originalFilename.lastIndexOf("."), originalFilename.length());
+        UploadResult uploadResult;
+        try {
+            String fileName = extractFilename(file);
+            String uploadPath = directoryPath + fileName;
+            // materialize the multipart upload on local disk, then copy it into HDFS
+            File localFile = org.eco.common.hadoop.utils.FileUtil.multipartFileToFile(file);
+            hadoopClient.copyFileToHDFS(false, true, localFile.getPath(), uploadPath);
+            uploadResult = UploadResult.builder().url(uploadPath).filename(uploadPath).build();
+        } catch (Exception e) {
+            throw new BusinessException(e.getMessage());
+        }
+        // persist the file record
+        return buildResultEntity(originalFilename, suffix, "hadoop", uploadResult);
+    }
+
     @Override
     public UploadRes upload(byte[] file, String name) {
         String suffix = StringUtils.substring(name, name.lastIndexOf("."), name.length());
@@ -196,7 +235,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
         File tempFile = null;
         try {
             tempFile = File.createTempFile(name, suffix);
-            if (StrUtil.equals(storage.getConfigKey(), "local")) {
+            if (StrUtil.equals(storage.getConfigKey(), "hadoop")) {
                 Map<String, String> map = commonService.uploadFile(FileUtils.fileToMultipartFile(tempFile));
                 uploadResult = UploadResult.builder().url(map.get("url")).filename(map.get("fileName")).build();
             } else {
@@ -210,21 +249,39 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
                 FileUtil.del(tempFile);
             }
         }
-
-
         SysOssVo ossVo = buildResultEntity(name, suffix, storage.getConfigKey(), uploadResult);
         return UploadRes.builder().name(name).url(uploadResult.getUrl()).ossId(ossVo.getOssId()).build();
     }
+
+    @Override
+    public UploadRes uploadHadoop(byte[] file, String name) {
+        String suffix = StringUtils.substring(name, name.lastIndexOf("."), name.length());
+        UploadResult uploadResult;
+        File tempFile = null;
+        try {
+            tempFile = File.createTempFile(name, suffix);
+            // write the byte[] payload into the temp file so the upload carries the actual content
+            java.nio.file.Files.write(tempFile.toPath(), file);
+            String uploadPath = directoryPath + name;
+            hadoopClient.copyFileToHDFS(false, true, tempFile.getPath(), uploadPath);
+            uploadResult = UploadResult.builder().url(uploadPath).filename(uploadPath).build();
+        } catch (IOException e) {
+            throw new BusinessException(e.getMessage());
+        } finally {
+            // delete the temporary file (optional)
+            if (tempFile != null) {
+                FileUtil.del(tempFile);
+            }
+        }
+        SysOssVo ossVo = buildResultEntity(name, suffix, "hadoop", uploadResult);
+        return UploadRes.builder().name(name).url(uploadResult.getUrl()).ossId(ossVo.getOssId()).build();
+    }
 
     @Override
     public SysOssVo upload(File file) {
         String originalFileName = file.getName();
         String suffix = StringUtils.substring(originalFileName, originalFileName.lastIndexOf("."), originalFileName.length());
         OssClient storage = OssFactory.instance();
-
         UploadResult uploadResult;
         try {
-            if (StrUtil.equals(storage.getConfigKey(), "local")) {
+            if (StrUtil.equals(storage.getConfigKey(), "hadoop")) {
                 Map<String, String> map = commonService.uploadFile(FileUtils.fileToMultipartFile(file));
                 uploadResult = UploadResult.builder().url(map.get("url")).filename(map.get("fileName")).build();
             } else {
@@ -237,6 +294,22 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
         return buildResultEntity(originalFileName, suffix, storage.getConfigKey(), uploadResult);
     }
 
+    @Override
+    public SysOssVo uploadHadoop(File file) {
+        String originalFileName = file.getName();
+        String suffix = StringUtils.substring(originalFileName, originalFileName.lastIndexOf("."), originalFileName.length());
+        UploadResult uploadResult;
+        try {
+            String uploadPath = directoryPath + originalFileName;
+            hadoopClient.copyFileToHDFS(false, true, file.getPath(), uploadPath);
+            uploadResult = UploadResult.builder().url(uploadPath).filename(uploadPath).build();
+        } catch (Exception e) {
+            throw new BusinessException(e.getMessage());
+        }
+        // persist the file record
+        return buildResultEntity(originalFileName, suffix, "hadoop", uploadResult);
+    }
+
     private SysOssVo buildResultEntity(String originalFileName, String suffix, String configKey, UploadResult uploadResult) {
         SysOss oss = new SysOss();
         oss.setUrl(uploadResult.getUrl());
@@ -258,7 +331,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
 
         List<SysOss> list = this.listByIds(ids);
         for (SysOss sysOss : list) {
-            if (StrUtil.equals("local", sysOss.getService())) {
+            if (StrUtil.equals("hadoop", sysOss.getService())) {
                 // delete the local file
                 String filePath = EcoConfig.getProfile() + StringUtils.substringAfter(sysOss.getFileName(), Constants.RESOURCE_PREFIX);
                 boolean deleted = FileUtil.del(filePath);
@@ -280,7 +353,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
      * @return oss the OSS object with its URL resolved
      */
     private SysOssVo matchingUrl(SysOssVo oss) {
-        if (StrUtil.equals("local", oss.getService())) {
+        if (StrUtil.equals("hadoop", oss.getService())) {
             return oss;
         }
         OssClient storage = OssFactory.instance(oss.getService());
@@ -298,7 +371,7 @@ public class SysOssServiceImpl extends BaseServiceImpl<SysOssMapper, SysOss> imp
         oss.setFileSuffix(suffix);
         oss.setFileName(fileName);
         oss.setOriginalName(originalFileName);
-        oss.setService("local");
+        oss.setService("hadoop");
         this.save(oss);
         SysOssVo sysOssVo = MapstructUtils.convert(oss, SysOssVo.class);
         assert sysOssVo != null;

+ 8 - 0
als-start/src/main/resources/application-dev.yml

@@ -173,3 +173,11 @@ easy-es:
  password: #es password; delete this line if there is none
 kgqa:
   ask-url: http://127.0.0.1:8000/kgqa/ask/
+--- # Hadoop client
+hadoop:
+  # whether to enable the hadoop client
+  enabled: true
+  nameNode: hdfs://127.0.0.1:19900/
+  directoryPath: /uploadPath/
+  userName: root
+  replication: 1

+ 8 - 0
als-start/src/main/resources/application-local.yml

@@ -169,3 +169,11 @@ easy-es:
  password: #es password; delete this line if there is none
 kgqa:
   ask-url: http://192.168.0.103:8000/kgqa/ask/
+--- # Hadoop client
+hadoop:
+  # whether to enable the hadoop client
+  enabled: true
+  nameNode: hdfs://127.0.0.1:19900/
+  directoryPath: /uploadPath/
+  userName: root
+  replication: 1

+ 23 - 0
pom.xml

@@ -66,6 +66,7 @@
         <easy-es.version>2.0.0-beta4</easy-es.version>
         <es.version>7.12.1</es.version>
         <ql.version>3.3.4</ql.version>
+        <hadoop.client.version>3.3.6</hadoop.client.version>
     </properties>
 
     <profiles>
@@ -366,6 +367,28 @@
                 <version>${alibaba-ttl.version}</version>
             </dependency>
 
+
+            <!-- hadoop -->
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-client</artifactId>
+                <version>${hadoop.client.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-log4j12</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-reload4j</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>javax.servlet</groupId>
+                        <artifactId>servlet-api</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
             <!-- OSS configuration -->
             <!--  AWS SDK for Java 2.x  -->
             <dependency>

BIN
接口.xlsx