扫描结构更改

This commit is contained in:
lilin 2025-05-26 15:56:19 +08:00
parent 1e925a2e61
commit 776209bdd0
5 changed files with 224 additions and 7 deletions

View File

@ -3,6 +3,8 @@ package com.yfd.platform.modules.specialDocument.mapper;
import com.yfd.platform.modules.specialDocument.domain.Files;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import java.util.List;
/**
* <p>
* 专项文档表 Mapper 接口
@ -13,4 +15,5 @@ import com.baomidou.mybatisplus.core.mapper.BaseMapper;
*/
public interface FilesMapper extends BaseMapper<Files> {

    /**
     * Batch-inserts file records into {@code sd_files} with a single
     * multi-row INSERT (see {@code batchInsertFiles} in FilesMapper.xml).
     *
     * @param filesList files to insert; must be non-empty — the generated
     *                  {@code VALUES} list would otherwise be invalid SQL
     * @return number of rows actually inserted
     */
    int batchInsertFiles(List<Files> filesList);
}

View File

@ -3,6 +3,8 @@ package com.yfd.platform.modules.specialDocument.mapper;
import com.yfd.platform.modules.specialDocument.domain.Nodes;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import java.util.List;
/**
* <p>
* 专项文档节点表 Mapper 接口
@ -13,4 +15,7 @@ import com.baomidou.mybatisplus.core.mapper.BaseMapper;
*/
public interface NodesMapper extends BaseMapper<Nodes> {

    /**
     * Batch-inserts node rows into {@code sd_nodes} with a single multi-row
     * INSERT, intended to skip rows that hit a unique-key conflict.
     * NOTE(review): the current mapper XML issues a plain INSERT — confirm
     * it actually uses IGNORE semantics as the method name promises.
     *
     * @param nodesList nodes to insert; must be non-empty — the generated
     *                  {@code VALUES} list would otherwise be invalid SQL
     * @return number of rows actually inserted
     */
    int batchInsertIgnore(List<Nodes> nodesList);

    /**
     * Rewires {@code parent_id} for every node whose {@code custom3} path
     * equals {@code CONCAT(parent.custom3, parent.node_name, '/')} within
     * the same project (see mapper XML). Nodes with no matching parent are
     * left untouched.
     *
     * @return number of rows updated
     */
    int updateParentIdByPathHierarchy();
}

View File

@ -1,7 +1,9 @@
package com.yfd.platform.modules.specialDocument.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.ObjUtil;
import cn.hutool.core.io.FileUtil;
import com.amazonaws.util.IOUtils;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
@ -671,8 +673,14 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
Project project = projectMapper.selectById(id);
// 查询本地文件路径根目录 E:\yun
QueryWrapper<StorageSourceConfig> queryWrapper = new QueryWrapper<>();
queryWrapper.eq("name", "filePath");
queryWrapper.eq("type", "sdlocal");
StorageSourceConfig storageSourceConfig = storageSourceConfigMapper.selectOne(queryWrapper);
//获取文件列表
String absolutePath = "/" + project.getProjectName() + "/";
String absolutePath = project.getProjectName() + "/";
FileListRequest fileListRequest = buildFileRequest(absolutePath);
String storageKey = fileListRequest.getStorageKey();
Integer storageId = storageSourceService.findIdByKey(storageKey);
@ -682,6 +690,45 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
// 处理请求参数默认值
fileListRequest.handleDefaultValue();
AbstractBaseFileService<?> fileService = storageSourceContext.getByStorageId(storageId);
//获取执行时间
long startFileListData = System.currentTimeMillis();
List<FileItemResult> fileItemListss = fileService.fileListData("", project.getProjectName());
LOGGER.info("[fileListData] 耗时 {} ms | 数据量: {} 条",
System.currentTimeMillis() - startFileListData,
fileItemListss.size());
//通过hutool获取路径下面的文件和文件夹// 递归获取所有内容包含文件和目录需自定义过滤
String path = storageSourceConfig.getValue() + absolutePath;
long startHutoolFileListData = System.currentTimeMillis();
List<File> allContents = FileUtil.loopFiles(path, file -> true);
LOGGER.info("[allContents] 耗时 {} ms | 数据量: {} 条",
System.currentTimeMillis() - startHutoolFileListData,
allContents.size());
// 过滤文件夹添加第二个耗时日志
long startFilterFolders = System.currentTimeMillis();
List<FileItemResult> filteredFolders = fileItemListss.stream()
.filter(item -> item.getType() == FileTypeEnum.FOLDER && item.getPath() != null && !item.getPath().isEmpty())
.collect(Collectors.toList());
LOGGER.info("[过滤文件夹] 耗时 {} ms | 过滤后数量: {} 条",
System.currentTimeMillis() - startFilterFolders,
filteredFolders.size());
// 过滤文件添加第三个耗时日志
long startFilterFiles = System.currentTimeMillis();
List<FileItemResult> filteredFiles = fileItemListss.stream()
.filter(item -> item.getType() == FileTypeEnum.FILE && item.getPath() != null && !item.getPath().isEmpty())
.collect(Collectors.toList());
LOGGER.info("[过滤文件] 耗时 {} ms | 过滤后数量: {} 条",
System.currentTimeMillis() - startFilterFiles,
filteredFiles.size());
//todo 首先获取两个集合 对比出数据库中没有的文件夹以及文件递归增加
List<FileItemResult> fileItemList = fileService.fileList(fileListRequest.getPath());
if (fileItemList.size() == 0) {
@ -689,6 +736,115 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
}
//获取当前登录用户 上传人是当前登录人
UsernamePasswordAuthenticationToken authentication =
(UsernamePasswordAuthenticationToken) SecurityContextHolder.getContext().getAuthentication();
LoginUser loginuser = null;
if (authentication != null) {
loginuser = (LoginUser) authentication.getPrincipal();
}
// 设置当前时间
LocalDateTime now = LocalDateTime.now();
// 转换为 Timestamp
Timestamp currentTime = Timestamp.valueOf(now);
//处理文件夹新增到表结构
List<Nodes> nodesList = new ArrayList<>();
long startBuildNodes = System.currentTimeMillis();
for (FileItemResult FolderItem : filteredFolders) {
Nodes nodes = new Nodes();
nodes.setId(IdUtil.fastSimpleUUID());
nodes.setProjectId(id);
nodes.setParentId("00");
nodes.setNodeOrder(1);
// 1. 处理空路径确保默认值
String filePath = FolderItem.getPath() != null ? FolderItem.getPath() : "/";
// 2. 路径标准化处理统一替换所有分隔符为 "/"并合并连续分隔符
String normalizedPath = filePath
.replaceAll("[/\\\\]+", "/") // "\" 或混合分隔符统一为 "/"
.replaceAll("/+$", ""); // 移除末尾的 "/"可选根据需求
// 3. 分割路径段
String[] pathSegments = normalizedPath.split("/");
// 4. 计算有效层级深度
long depth = Arrays.stream(pathSegments)
.filter(s -> !s.isEmpty())
.count();
// 5. 生成 nodeType两位数字
String nodeType = String.format("%02d", depth);
nodes.setNodeType(nodeType);
nodes.setNodeName(FolderItem.getName());
nodes.setCustom3(ensurePathFormat(FolderItem.getPath()));
nodes.setCreateTime(currentTime);
nodesList.add(nodes);
}
LOGGER.info("[构建节点列表] 耗时 {} ms | 待处理数量: {} 条",
System.currentTimeMillis() - startBuildNodes,
nodesList.size());
// 批量插入节点表忽略重复
if (!nodesList.isEmpty()) {
long startBatchInsert = System.currentTimeMillis();
int affectedRows = nodesMapper.batchInsertIgnore(nodesList);
LOGGER.info("[批量插入节点表] 耗时 {} ms | 实际新增数量: {} 条",
System.currentTimeMillis() - startBatchInsert,
affectedRows);
}
//批量插入节点以后开始修改节点表中的父节点ID
long costTime = System.currentTimeMillis();
int affectedLevelRows = nodesMapper.updateParentIdByPathHierarchy();
LOGGER.info("层级关系更新完成,影响 {} 行,总耗时 {} 毫秒", affectedLevelRows, costTime);
//接下来就是新增文件表 新增成功以后 通过路径获取节点表中的 custom3+路径 就是node ID
//处理文件夹新增到表结构
List<Files> filesList = new ArrayList<>();
long startBuildFiles = System.currentTimeMillis();
for (FileItemResult fileItem : filteredFiles) {
Files files = new Files();
files.setId(IdUtil.fastSimpleUUID());
files.setProjectId(id);
files.setNodeId("00");
files.setFileName(fileItem.getName());
files.setFilePath(ensurePathFormat(fileItem.getPath()));
// 获取文件大小字节
long fileSizeInBytes = fileItem.getSize();
// 转换为 MB 并保留两位小数
double fileSizeInMB = fileSizeInBytes / (1024.0 * 1024.0);
String fileSizeFormatted = String.format("%.2f", fileSizeInMB); // 保留两位小数
files.setFileSize(fileSizeFormatted);
files.setUploadTime(currentTime);
if (loginuser == null) {
files.setUploader(null);
} else {
files.setUploader(loginuser.getUsername());
}
filesList.add(files);
}
LOGGER.info("[构建文件列表] 耗时 {} ms | 待处理数量: {} 条",
System.currentTimeMillis() - startBuildFiles,
filesList.size());
// 批量插入文件表忽略重复
if (!filesList.isEmpty()) {
long startBatchInsertFiles = System.currentTimeMillis();
int affectedRowsFiles = filesMapper.batchInsertFiles(filesList);
LOGGER.info("[批量插入文件表] 耗时 {} ms | 实际新增数量: {} 条",
System.currentTimeMillis() - startBatchInsertFiles,
affectedRowsFiles);
}
//firstLayerData(fileItemList, id);
return "扫描完成";
}
// //获取数据库父节点为0的文件夹数据 通过所属项目ID和父节点查询
// List<Nodes> nodesList = nodesMapper.selectList(new LambdaQueryWrapper<Nodes>().eq(Nodes::getParentId, "00").eq(Nodes::getProjectId, id));
//
@ -697,12 +853,31 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
//
// // 步骤 2筛选新增数据 找到需要新增到数据库的文件夹
// List<FileItemResult> fileItemNewList = fileItemList.stream().filter(fileItem -> !existingNodeNames.contains(fileItem.getName())).collect(Collectors.toList());
firstLayerData(fileItemList, id);
return "扫描完成";
/**
 * Normalizes a path so it both starts and ends with "/" (e.g. "/data/test/").
 * A null, empty, or whitespace-only path yields the root path "/".
 */
private String ensurePathFormat(String path) {
    if (StringUtils.isBlank(path)) {
        return "/";
    }
    // Windows-style back-slashes become forward slashes.
    String result = path.replace('\\', '/');
    // Collapse every run of consecutive slashes into a single one.
    result = result.replaceAll("//+", "/");
    // Guarantee a leading slash.
    if (result.charAt(0) != '/') {
        result = "/" + result;
    }
    // Guarantee a trailing slash.
    if (result.charAt(result.length() - 1) != '/') {
        result = result + "/";
    }
    return result;
}
/**
* 第一层下面应该只有文件夹
*
@ -1098,13 +1273,13 @@ public class NodesServiceImpl extends ServiceImpl<NodesMapper, Nodes> implements
// 如果MD5值不相同则进行操作 //如果一致 则直接复制源文件到目标文件
if (StringUtils.isNoneEmpty(sourceLocalMD5, targetLocalMD5) && !sourceLocalMD5.equals(targetLocalMD5)) {
LOGGER.info("MD5值不一样的路径"+sourcePath);
LOGGER.info("MD5值不一样的替换路径"+targetPath);
LOGGER.info("MD5值不一样的路径" + sourcePath);
LOGGER.info("MD5值不一样的替换路径" + targetPath);
//拷贝文件到目标目录 将原来的文件名更改以及将数据库表结构更改
//更改目标路径下的文件名称
String newTargetPath = renameFile(sourcePath);
//将源目录文件 复制到 目标目录文件
copyToDirectoryFile(newTargetPath, value + targetFolderPath + "/");
copyToDirectoryFile(newTargetPath, value + targetFolderPath + "/");
} else {
//将源目录文件 复制到 目标目录文件 todo这个地方是覆盖

View File

@ -2,4 +2,16 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.yfd.platform.modules.specialDocument.mapper.FilesMapper">
    <!-- Batch insert into sd_files. INSERT IGNORE (MySQL) silently skips rows
         that violate a unique key, matching the "ignore duplicates" contract
         of FilesMapper.batchInsertFiles; the caller logs the returned count
         as the number of rows actually added. -->
    <insert id="batchInsertFiles">
        INSERT IGNORE INTO sd_files
        (id, project_id, node_id, file_name, file_path, file_size, upload_time, uploader)
        VALUES
        <foreach collection="list" item="item" separator=",">
            (#{item.id}, #{item.projectId}, #{item.nodeId}, #{item.fileName}, #{item.filePath},
             #{item.fileSize}, #{item.uploadTime}, #{item.uploader})
        </foreach>
    </insert>
</mapper>

View File

@ -2,4 +2,26 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.yfd.platform.modules.specialDocument.mapper.NodesMapper">
    <!-- Batch insert into sd_nodes. INSERT IGNORE (MySQL) silently skips rows
         that violate a unique key — the statement id is batchInsertIgnore and
         the Java caller logs the return value as "rows actually added", so a
         plain INSERT (which would raise a duplicate-key error) is wrong here. -->
    <insert id="batchInsertIgnore">
        INSERT IGNORE INTO sd_nodes
        (id, project_id, parent_id, node_order, node_type, node_name, custom3, create_time)
        VALUES
        <foreach collection="list" item="item" separator=",">
            (#{item.id}, #{item.projectId}, #{item.parentId}, #{item.nodeOrder}, #{item.nodeType},
             #{item.nodeName}, #{item.custom3}, #{item.createTime})
        </foreach>
    </insert>

    <!-- Re-links each node to its direct parent: a node whose custom3 equals
         parent.custom3 + parent.node_name + '/' is a child of that parent,
         scoped per project_id. INNER JOIN replaces the original
         LEFT JOIN + "WHERE b.id IS NOT NULL", which is equivalent but
         roundabout; rows with no matching parent keep their parent_id. -->
    <update id="updateParentIdByPathHierarchy">
        UPDATE sd_nodes a
        INNER JOIN sd_nodes b
            ON b.project_id = a.project_id
            AND a.custom3 = CONCAT(b.custom3, b.node_name, '/')
        SET a.parent_id = b.id
    </update>
</mapper>